From fb73f231ab34dd6409ba2005ada62b028435e20d Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 4 Aug 2022 15:06:45 -0400 Subject: [PATCH] feat: Update Compute Engine API to revision 20220720 (#723) (#312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: Update Compute Engine API to revision 20220720 (#723) Source-Link: https://github.com/googleapis/googleapis/commit/60a0fa73c0dd0b0e5ace935fdaecdf2182a7bfd2 Source-Link: https://github.com/googleapis/googleapis-gen/commit/ba1df1be93f5a1aa5c647fc2f195d41b0075aa93 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmExZGYxYmU5M2Y1YTFhYTVjNjQ3ZmMyZjE5NWQ0MWIwMDc1YWE5MyJ9 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * add replacements in owlbot.py to fix docs build Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- google/cloud/compute/__init__.py | 16 + google/cloud/compute_v1/__init__.py | 14 + .../instance_group_managers/client.py | 18 +- .../compute_v1/services/instances/client.py | 6 +- .../region_instance_group_managers/client.py | 12 +- google/cloud/compute_v1/types/__init__.py | 14 + google/cloud/compute_v1/types/compute.py | 590 +++++++++++++++++- owlbot.py | 2 + tests/unit/gapic/compute_v1/test_disks.py | 42 +- tests/unit/gapic/compute_v1/test_images.py | 28 +- .../compute_v1/test_instance_templates.py | 12 + tests/unit/gapic/compute_v1/test_instances.py | 62 +- .../gapic/compute_v1/test_machine_images.py | 20 + tests/unit/gapic/compute_v1/test_networks.py | 10 + .../gapic/compute_v1/test_region_disks.py | 42 +- .../gapic/compute_v1/test_region_instances.py | 12 + tests/unit/gapic/compute_v1/test_snapshots.py | 44 +- 17 files changed, 870 insertions(+), 74 deletions(-) diff --git a/google/cloud/compute/__init__.py b/google/cloud/compute/__init__.py index 5c5700cb0..2fed295ca 100644 --- a/google/cloud/compute/__init__.py +++ b/google/cloud/compute/__init__.py @@ -556,6 +556,8 @@ from google.cloud.compute_v1.types.compute import EnableXpnHostProjectRequest from google.cloud.compute_v1.types.compute import EnableXpnResourceProjectRequest from google.cloud.compute_v1.types.compute import Error +from google.cloud.compute_v1.types.compute import ErrorDetails +from google.cloud.compute_v1.types.compute import ErrorInfo from google.cloud.compute_v1.types.compute import Errors from google.cloud.compute_v1.types.compute import ExchangedPeeringRoute from google.cloud.compute_v1.types.compute import ExchangedPeeringRoutesList @@ -751,6 +753,8 @@ from google.cloud.compute_v1.types.compute import HealthChecksScopedList from google.cloud.compute_v1.types.compute import HealthStatus from google.cloud.compute_v1.types.compute import HealthStatusForNetworkEndpoint +from google.cloud.compute_v1.types.compute import Help +from google.cloud.compute_v1.types.compute import HelpLink from google.cloud.compute_v1.types.compute import HostRule from google.cloud.compute_v1.types.compute import HTTP2HealthCheck from google.cloud.compute_v1.types.compute import HttpFaultAbort @@ -1103,8 +1107,10 @@ from google.cloud.compute_v1.types.compute import ListZoneOperationsRequest from google.cloud.compute_v1.types.compute import ListZonesRequest from google.cloud.compute_v1.types.compute import LocalDisk +from google.cloud.compute_v1.types.compute import LocalizedMessage from google.cloud.compute_v1.types.compute import LocationPolicy from 
google.cloud.compute_v1.types.compute import LocationPolicyLocation +from google.cloud.compute_v1.types.compute import LocationPolicyLocationConstraints from google.cloud.compute_v1.types.compute import LogConfig from google.cloud.compute_v1.types.compute import LogConfigCloudAuditOptions from google.cloud.compute_v1.types.compute import LogConfigCounterOptions @@ -1806,6 +1812,9 @@ from google.cloud.compute_v1.types.compute import ( VmEndpointNatMappingsInterfaceNatMappings, ) +from google.cloud.compute_v1.types.compute import ( + VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, +) from google.cloud.compute_v1.types.compute import VmEndpointNatMappingsList from google.cloud.compute_v1.types.compute import VpnGateway from google.cloud.compute_v1.types.compute import VpnGatewayAggregatedList @@ -2184,6 +2193,8 @@ "EnableXpnHostProjectRequest", "EnableXpnResourceProjectRequest", "Error", + "ErrorDetails", + "ErrorInfo", "Errors", "ExchangedPeeringRoute", "ExchangedPeeringRoutesList", @@ -2357,6 +2368,8 @@ "HealthChecksScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", + "Help", + "HelpLink", "HostRule", "HTTP2HealthCheck", "HttpFaultAbort", @@ -2631,8 +2644,10 @@ "ListZoneOperationsRequest", "ListZonesRequest", "LocalDisk", + "LocalizedMessage", "LocationPolicy", "LocationPolicyLocation", + "LocationPolicyLocationConstraints", "LogConfig", "LogConfigCloudAuditOptions", "LogConfigCounterOptions", @@ -3152,6 +3167,7 @@ "ValidateUrlMapRequest", "VmEndpointNatMappings", "VmEndpointNatMappingsInterfaceNatMappings", + "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", "VpnGateway", "VpnGatewayAggregatedList", diff --git a/google/cloud/compute_v1/__init__.py b/google/cloud/compute_v1/__init__.py index 9e6504a41..766e78a9b 100644 --- a/google/cloud/compute_v1/__init__.py +++ b/google/cloud/compute_v1/__init__.py @@ -366,6 +366,8 @@ from .types.compute import EnableXpnHostProjectRequest from .types.compute import EnableXpnResourceProjectRequest from .types.compute import Error +from .types.compute import ErrorDetails +from .types.compute import ErrorInfo from .types.compute import Errors from .types.compute import ExchangedPeeringRoute from .types.compute import ExchangedPeeringRoutesList @@ -539,6 +541,8 @@ from .types.compute import HealthChecksScopedList from .types.compute import HealthStatus from .types.compute import HealthStatusForNetworkEndpoint +from .types.compute import Help +from .types.compute import HelpLink from .types.compute import HostRule from .types.compute import HTTP2HealthCheck from .types.compute import HttpFaultAbort @@ -813,8 +817,10 @@ from .types.compute import ListZoneOperationsRequest from .types.compute import ListZonesRequest from .types.compute import LocalDisk +from .types.compute import LocalizedMessage from .types.compute import LocationPolicy from .types.compute import LocationPolicyLocation +from .types.compute import LocationPolicyLocationConstraints from .types.compute import LogConfig from .types.compute import LogConfigCloudAuditOptions from .types.compute import LogConfigCounterOptions @@ -1336,6 +1342,7 @@ from .types.compute import ValidateUrlMapRequest from .types.compute import VmEndpointNatMappings from .types.compute import VmEndpointNatMappingsInterfaceNatMappings +from .types.compute import VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings from .types.compute import VmEndpointNatMappingsList from .types.compute import VpnGateway from .types.compute import VpnGatewayAggregatedList @@ 
-1634,6 +1641,8 @@ "EnableXpnHostProjectRequest", "EnableXpnResourceProjectRequest", "Error", + "ErrorDetails", + "ErrorInfo", "Errors", "ExchangedPeeringRoute", "ExchangedPeeringRoutesList", @@ -1821,6 +1830,8 @@ "HealthChecksScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", + "Help", + "HelpLink", "HostRule", "HttpFaultAbort", "HttpFaultDelay", @@ -2103,8 +2114,10 @@ "ListZoneOperationsRequest", "ListZonesRequest", "LocalDisk", + "LocalizedMessage", "LocationPolicy", "LocationPolicyLocation", + "LocationPolicyLocationConstraints", "LogConfig", "LogConfigCloudAuditOptions", "LogConfigCounterOptions", @@ -2676,6 +2689,7 @@ "ValidateUrlMapRequest", "VmEndpointNatMappings", "VmEndpointNatMappingsInterfaceNatMappings", + "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", "VpnGateway", "VpnGatewayAggregatedList", diff --git a/google/cloud/compute_v1/services/instance_group_managers/client.py b/google/cloud/compute_v1/services/instance_group_managers/client.py index 3307f91a3..b49d7b676 100644 --- a/google/cloud/compute_v1/services/instance_group_managers/client.py +++ b/google/cloud/compute_v1/services/instance_group_managers/client.py @@ -2744,14 +2744,16 @@ def list_managed_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListManagedInstancesPager: - r"""Lists all of the instances in the managed instance - group. Each instance in the list has a currentAction, - which indicates the action that the managed instance - group is performing on the instance. For example, if the - group is still creating an instance, the currentAction - is CREATING. If a previous action failed, the list - displays the errors for that failed action. The orderBy - query parameter is not supported. + r"""Lists all of the instances in the managed instance group. Each + instance in the list has a currentAction, which indicates the + action that the managed instance group is performing on the + instance. For example, if the group is still creating an + instance, the currentAction is CREATING. If a previous action + failed, the list displays the errors for that failed action. The + orderBy query parameter is not supported. The ``pageToken`` + query parameter is supported only in the alpha and beta API and + only if the group's ``listManagedInstancesResults`` field is set + to ``PAGINATED``. Args: request (Union[google.cloud.compute_v1.types.ListManagedInstancesInstanceGroupManagersRequest, dict]): diff --git a/google/cloud/compute_v1/services/instances/client.py b/google/cloud/compute_v1/services/instances/client.py index b18aec666..875e480f4 100644 --- a/google/cloud/compute_v1/services/instances/client.py +++ b/google/cloud/compute_v1/services/instances/client.py @@ -1268,7 +1268,8 @@ def bulk_insert_unary( metadata: Sequence[Tuple[str, str]] = (), ) -> compute.Operation: r"""Creates multiple instances. Count specifies the - number of instances to create. + number of instances to create. For more information, see + About bulk creation of VMs. Args: request (Union[google.cloud.compute_v1.types.BulkInsertInstanceRequest, dict]): @@ -1371,7 +1372,8 @@ def bulk_insert( metadata: Sequence[Tuple[str, str]] = (), ) -> extended_operation.ExtendedOperation: r"""Creates multiple instances. Count specifies the - number of instances to create. + number of instances to create. For more information, see + About bulk creation of VMs. 
Args: request (Union[google.cloud.compute_v1.types.BulkInsertInstanceRequest, dict]): diff --git a/google/cloud/compute_v1/services/region_instance_group_managers/client.py b/google/cloud/compute_v1/services/region_instance_group_managers/client.py index ad6005696..4699016b8 100644 --- a/google/cloud/compute_v1/services/region_instance_group_managers/client.py +++ b/google/cloud/compute_v1/services/region_instance_group_managers/client.py @@ -2685,11 +2685,13 @@ def list_managed_instances( timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListManagedInstancesPager: - r"""Lists the instances in the managed instance group and - instances that are scheduled to be created. The list - includes any current actions that the group has - scheduled for its instances. The orderBy query parameter - is not supported. + r"""Lists the instances in the managed instance group and instances + that are scheduled to be created. The list includes any current + actions that the group has scheduled for its instances. The + orderBy query parameter is not supported. The ``pageToken`` + query parameter is supported only in the alpha and beta API and + only if the group's ``listManagedInstancesResults`` field is set + to ``PAGINATED``. Args: request (Union[google.cloud.compute_v1.types.ListManagedInstancesRegionInstanceGroupManagersRequest, dict]): diff --git a/google/cloud/compute_v1/types/__init__.py b/google/cloud/compute_v1/types/__init__.py index a852f3517..a255f18f2 100644 --- a/google/cloud/compute_v1/types/__init__.py +++ b/google/cloud/compute_v1/types/__init__.py @@ -274,6 +274,8 @@ EnableXpnHostProjectRequest, EnableXpnResourceProjectRequest, Error, + ErrorDetails, + ErrorInfo, Errors, ExchangedPeeringRoute, ExchangedPeeringRoutesList, @@ -447,6 +449,8 @@ HealthChecksScopedList, HealthStatus, HealthStatusForNetworkEndpoint, + Help, + HelpLink, HostRule, HTTP2HealthCheck, HttpFaultAbort, @@ -721,8 +725,10 @@ ListZoneOperationsRequest, ListZonesRequest, LocalDisk, + LocalizedMessage, LocationPolicy, LocationPolicyLocation, + LocationPolicyLocationConstraints, LogConfig, LogConfigCloudAuditOptions, LogConfigCounterOptions, @@ -1242,6 +1248,7 @@ ValidateUrlMapRequest, VmEndpointNatMappings, VmEndpointNatMappingsInterfaceNatMappings, + VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings, VmEndpointNatMappingsList, VpnGateway, VpnGatewayAggregatedList, @@ -1534,6 +1541,8 @@ "EnableXpnHostProjectRequest", "EnableXpnResourceProjectRequest", "Error", + "ErrorDetails", + "ErrorInfo", "Errors", "ExchangedPeeringRoute", "ExchangedPeeringRoutesList", @@ -1707,6 +1716,8 @@ "HealthChecksScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", + "Help", + "HelpLink", "HostRule", "HTTP2HealthCheck", "HttpFaultAbort", @@ -1981,8 +1992,10 @@ "ListZoneOperationsRequest", "ListZonesRequest", "LocalDisk", + "LocalizedMessage", "LocationPolicy", "LocationPolicyLocation", + "LocationPolicyLocationConstraints", "LogConfig", "LogConfigCloudAuditOptions", "LogConfigCounterOptions", @@ -2502,6 +2515,7 @@ "ValidateUrlMapRequest", "VmEndpointNatMappings", "VmEndpointNatMappingsInterfaceNatMappings", + "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", "VpnGateway", "VpnGatewayAggregatedList", diff --git a/google/cloud/compute_v1/types/compute.py b/google/cloud/compute_v1/types/compute.py index 0eb47fbeb..e45707741 100644 --- a/google/cloud/compute_v1/types/compute.py +++ b/google/cloud/compute_v1/types/compute.py @@ -279,6 +279,8 @@ 
"EnableXpnHostProjectRequest", "EnableXpnResourceProjectRequest", "Error", + "ErrorDetails", + "ErrorInfo", "Errors", "ExchangedPeeringRoute", "ExchangedPeeringRoutesList", @@ -455,6 +457,8 @@ "HealthChecksScopedList", "HealthStatus", "HealthStatusForNetworkEndpoint", + "Help", + "HelpLink", "HostRule", "HttpFaultAbort", "HttpFaultDelay", @@ -726,8 +730,10 @@ "ListZoneOperationsRequest", "ListZonesRequest", "LocalDisk", + "LocalizedMessage", "LocationPolicy", "LocationPolicyLocation", + "LocationPolicyLocationConstraints", "LogConfig", "LogConfigCloudAuditOptions", "LogConfigCounterOptions", @@ -1247,6 +1253,7 @@ "ValidateUrlMapRequest", "VmEndpointNatMappings", "VmEndpointNatMappingsInterfaceNatMappings", + "VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", "VmEndpointNatMappingsList", "VpnGateway", "VpnGatewayAggregatedList", @@ -8989,6 +8996,12 @@ class AttachedDisk(proto.Message): r"""An instance-attached disk resource. Attributes: + architecture (str): + [Output Only] The architecture of the attached disk. Valid + values are ARM64 or X86_64. Check the Architecture enum for + the list of possible values. + + This field is a member of `oneof`_ ``_architecture``. auto_delete (bool): Specifies whether the disk will be auto-deleted when the instance is deleted (but @@ -9114,6 +9127,15 @@ class AttachedDisk(proto.Message): This field is a member of `oneof`_ ``_type``. """ + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the attached disk. Valid values + are ARM64 or X86_64. + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + class Interface(proto.Enum): r"""Specifies the disk interface to use for attaching this disk, which is either SCSI or NVME. The default is SCSI. Persistent @@ -9143,6 +9165,11 @@ class Type(proto.Enum): PERSISTENT = 460683927 SCRATCH = 496778970 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) auto_delete = proto.Field( proto.BOOL, number=464761403, @@ -9232,6 +9259,12 @@ class AttachedDiskInitializeParams(proto.Message): other, but not both. Attributes: + architecture (str): + The architecture of the attached disk. Valid values are + arm64 or x86_64. Check the Architecture enum for the list of + possible values. + + This field is a member of `oneof`_ ``_architecture``. description (str): An optional description. Provide this property when creating the disk. @@ -9359,6 +9392,15 @@ class AttachedDiskInitializeParams(proto.Message): This field is a member of `oneof`_ ``_source_snapshot_encryption_key``. """ + class Architecture(proto.Enum): + r"""The architecture of the attached disk. Valid values are arm64 or + x86_64. + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + class OnUpdateAction(proto.Enum): r"""Specifies which action to take on instance update with this disk. Default is to use the existing disk. @@ -9368,6 +9410,11 @@ class OnUpdateAction(proto.Enum): RECREATE_DISK_IF_SOURCE_CHANGED = 398099712 USE_EXISTING_DISK = 232682233 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) description = proto.Field( proto.STRING, number=422937596, @@ -10616,7 +10663,7 @@ class Backend(proto.Message): This field is a member of `oneof`_ ``_max_rate_per_instance``. max_utilization (float): Optional parameter to define a target capacity for the - UTILIZATIONbalancing mode. The valid range is [0.0, 1.0]. + UTILIZATION balancing mode. 
The valid range is [0.0, 1.0]. For usage guidelines, see Utilization balancing mode. This field is a member of `oneof`_ ``_max_utilization``. @@ -13316,7 +13363,8 @@ class BulkInsertInstanceResource(proto.Message): This field is a member of `oneof`_ ``_instance_properties``. location_policy (google.cloud.compute_v1.types.LocationPolicy): - Policy for chosing target zone. + Policy for chosing target zone. For more + information, see Create VMs in bulk . This field is a member of `oneof`_ ``_location_policy``. min_count (int): @@ -13971,6 +14019,7 @@ class Status(proto.Enum): """ UNDEFINED_STATUS = 0 ACTIVE = 314733318 + CANCELLED = 41957681 CREATING = 455564985 EXPIRED = 482489093 NOT_YET_ACTIVE = 20607337 @@ -19151,6 +19200,12 @@ class Disk(proto.Message): information, read Regional resources. Attributes: + architecture (str): + The architecture of the disk. Valid values are ARM64 or + X86_64. Check the Architecture enum for the list of possible + values. + + This field is a member of `oneof`_ ``_architecture``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -19434,6 +19489,13 @@ class Disk(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class Architecture(proto.Enum): + r"""The architecture of the disk. Valid values are ARM64 or X86_64.""" + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + class Status(proto.Enum): r"""[Output Only] The status of disk creation. - CREATING: Disk is provisioning. - RESTORING: Source data is being copied into the @@ -19447,6 +19509,11 @@ class Status(proto.Enum): READY = 77848963 RESTORING = 404263851 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) creation_timestamp = proto.Field( proto.STRING, number=30525366, @@ -20499,6 +20566,103 @@ class Error(proto.Message): ) +class ErrorDetails(proto.Message): + r""" + + Attributes: + error_info (google.cloud.compute_v1.types.ErrorInfo): + + This field is a member of `oneof`_ ``_error_info``. + help_ (google.cloud.compute_v1.types.Help): + + This field is a member of `oneof`_ ``_help``. + localized_message (google.cloud.compute_v1.types.LocalizedMessage): + + This field is a member of `oneof`_ ``_localized_message``. + """ + + error_info = proto.Field( + proto.MESSAGE, + number=25251973, + optional=True, + message="ErrorInfo", + ) + help_ = proto.Field( + proto.MESSAGE, + number=3198785, + optional=True, + message="Help", + ) + localized_message = proto.Field( + proto.MESSAGE, + number=404537155, + optional=True, + message="LocalizedMessage", + ) + + +class ErrorInfo(proto.Message): + r"""Describes the cause of the error with structured details. Example of + an error when contacting the "pubsub.googleapis.com" API when it is + not enabled: { "reason": "API_DISABLED" "domain": "googleapis.com" + "metadata": { "resource": "projects/123", "service": + "pubsub.googleapis.com" } } This response indicates that the + pubsub.googleapis.com API is not enabled. Example of an error that + is returned when attempting to create a Spanner instance in a region + that is out of stock: { "reason": "STOCKOUT" "domain": + "spanner.googleapis.com", "metadata": { "availableRegions": + "us-central1,us-east2" } } + + Attributes: + domain (str): + The logical grouping to which the "reason" + belongs. The error domain is typically the + registered service name of the tool or product + that generates the error. Example: + "pubsub.googleapis.com". 
If the error is + generated by some common infrastructure, the + error domain must be a globally unique value + that identifies the infrastructure. For Google + API infrastructure, the error domain is + "googleapis.com". + + This field is a member of `oneof`_ ``_domain``. + metadatas (Mapping[str, str]): + Additional structured details about this error. Keys should + match `[a-zA-Z0-9-_]` and be limited to 64 characters in + length. When identifying the current value of an exceeded + limit, the units should be contained in the key, not the + value. For example, rather than {"instanceLimit": + "100/request"}, should be returned as, + {"instanceLimitPerRequest": "100"}, if the client exceeds + the number of instances that can be created in a single + (batch) request. + reason (str): + The reason of the error. This is a constant value that + identifies the proximate cause of the error. Error reasons + are unique within a particular domain of errors. This should + be at most 63 characters and match `[A-Z0-9_]+`. + + This field is a member of `oneof`_ ``_reason``. + """ + + domain = proto.Field( + proto.STRING, + number=284415172, + optional=True, + ) + metadatas = proto.MapField( + proto.STRING, + proto.STRING, + number=8514340, + ) + reason = proto.Field( + proto.STRING, + number=138777156, + optional=True, + ) + + class Errors(proto.Message): r""" @@ -20507,6 +20671,12 @@ class Errors(proto.Message): [Output Only] The error type identifier for this error. This field is a member of `oneof`_ ``_code``. + error_details (Sequence[google.cloud.compute_v1.types.ErrorDetails]): + [Output Only] An optional list of messages that contain the + error details. There is a set of defined message types to + use for providing details.The syntax depends on the error + code. For example, QuotaExceededInfo will have details when + the error code is QUOTA_EXCEEDED. location (str): [Output Only] Indicates the field in the request that caused the error. This property is optional. @@ -20523,6 +20693,11 @@ class Errors(proto.Message): number=3059181, optional=True, ) + error_details = proto.RepeatedField( + proto.MESSAGE, + number=274653963, + message="ErrorDetails", + ) location = proto.Field( proto.STRING, number=290430901, @@ -22254,9 +22429,8 @@ class ForwardingRule(proto.Message): ip_version (str): The IP Version that will be used by this forwarding rule. Valid options are IPV4 or IPV6. - This can only be specified for an external - global forwarding rule. Check the IpVersion enum - for the list of possible values. + Check the IpVersion enum for the list of + possible values. This field is a member of `oneof`_ ``_ip_version``. is_mirroring_collector (bool): @@ -22476,8 +22650,7 @@ class IPProtocolEnum(proto.Enum): class IpVersion(proto.Enum): r"""The IP Version that will be used by this forwarding rule. - Valid options are IPV4 or IPV6. This can only be specified for - an external global forwarding rule. + Valid options are IPV4 or IPV6. """ UNDEFINED_IP_VERSION = 0 IPV4 = 2254341 @@ -22516,6 +22689,7 @@ class PscConnectionStatus(proto.Enum): UNDEFINED_PSC_CONNECTION_STATUS = 0 ACCEPTED = 246714279 CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 PENDING = 35394935 REJECTED = 174130302 STATUS_UNSPECIFIED = 42133066 @@ -28130,6 +28304,52 @@ class HealthState(proto.Enum): ) +class Help(proto.Message): + r"""Provides links to documentation or for performing an out of + band action. 
For example, if a quota check failed with an error + indicating the calling project hasn't enabled the accessed + service, this can contain a URL pointing directly to the right + place in the developer console to flip the bit. + + Attributes: + links (Sequence[google.cloud.compute_v1.types.HelpLink]): + URL(s) pointing to additional information on + handling the current error. + """ + + links = proto.RepeatedField( + proto.MESSAGE, + number=102977465, + message="HelpLink", + ) + + +class HelpLink(proto.Message): + r"""Describes a URL link. + + Attributes: + description (str): + Describes what the link offers. + + This field is a member of `oneof`_ ``_description``. + url (str): + The URL of the link. + + This field is a member of `oneof`_ ``_url``. + """ + + description = proto.Field( + proto.STRING, + number=422937596, + optional=True, + ) + url = proto.Field( + proto.STRING, + number=116079, + optional=True, + ) + + class HostRule(proto.Message): r"""UrlMaps A host-matching rule for a URL. If matched, will use the named PathMatcher to select the BackendService. @@ -29137,6 +29357,12 @@ class Image(proto.Message): Images. Attributes: + architecture (str): + The architecture of the image. Valid values are ARM64 or + X86_64. Check the Architecture enum for the list of possible + values. + + This field is a member of `oneof`_ ``_architecture``. archive_size_bytes (int): Size of the image tar.gz archive stored in Google Cloud Storage (in bytes). @@ -29347,6 +29573,13 @@ class Image(proto.Message): image (regional or multi-regional). """ + class Architecture(proto.Enum): + r"""The architecture of the image. Valid values are ARM64 or X86_64.""" + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + class SourceType(proto.Enum): r"""The type of the image used to create this disk. The default and only valid value is RAW. @@ -29366,6 +29599,11 @@ class Status(proto.Enum): PENDING = 35394935 READY = 77848963 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) archive_size_bytes = proto.Field( proto.INT64, number=381093450, @@ -33141,6 +33379,14 @@ class Instance(proto.Message): identifier is defined by the server. This field is a member of `oneof`_ ``_id``. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. kind (str): [Output Only] Type of the resource. Always compute#instance for instances. @@ -33323,6 +33569,16 @@ class Instance(proto.Message): This field is a member of `oneof`_ ``_zone``. """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + class PrivateIpv6GoogleAccess(proto.Enum): r"""The private IPv6 google access type for the VM. If not specified, use INHERIT_FROM_SUBNETWORK as default. 
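The structured error plumbing added above (ErrorDetails, ErrorInfo, Help/HelpLink, LocalizedMessage, and the new Errors.error_details field) is meant to be read off a failed operation. What follows is a minimal illustrative sketch, not part of the generated patch: the `operation` argument is assumed to be a compute_v1.Operation returned by any mutating call, and unset optional sub-messages simply read back as empty defaults in proto-plus.

from google.cloud import compute_v1

def dump_structured_errors(operation: compute_v1.Operation) -> None:
    # operation.error.errors is empty when the operation succeeded.
    for err in operation.error.errors:
        print(f"{err.code}: {err.message}")
        # error_details is the repeated ErrorDetails field added in revision 20220720.
        for detail in err.error_details:
            info = detail.error_info
            print("  reason:", info.reason, "| domain:", info.domain,
                  "| metadata:", dict(info.metadatas))
            for link in detail.help_.links:
                print("  see:", link.description, "->", link.url)
            print("  localized:", detail.localized_message.message)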
@@ -33418,6 +33674,11 @@ class Status(proto.Enum): number=3355, optional=True, ) + key_revocation_action_type = proto.Field( + proto.STRING, + number=235941474, + optional=True, + ) kind = proto.Field( proto.STRING, number=3292052, @@ -35979,6 +36240,14 @@ class InstanceProperties(proto.Message): A list of guest accelerator cards' type and count to use for instances created from these properties. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. labels (Mapping[str, str]): Labels to apply to instances that are created from these properties. @@ -36067,6 +36336,16 @@ class InstanceProperties(proto.Message): This field is a member of `oneof`_ ``_tags``. """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + class PrivateIpv6GoogleAccess(proto.Enum): r"""The private IPv6 google access type for VMs. If not specified, use INHERIT_FROM_SUBNETWORK as default. Note that for MachineImage, this @@ -36109,6 +36388,11 @@ class PrivateIpv6GoogleAccess(proto.Enum): number=463595119, message="AcceleratorConfig", ) + key_revocation_action_type = proto.Field( + proto.STRING, + number=235941474, + optional=True, + ) labels = proto.MapField( proto.STRING, proto.STRING, @@ -51258,6 +51542,37 @@ class LocalDisk(proto.Message): ) +class LocalizedMessage(proto.Message): + r"""Provides a localized error message that is safe to return to + the user which can be attached to an RPC error. + + Attributes: + locale (str): + The locale used following the specification + defined at + http://www.rfc-editor.org/rfc/bcp/bcp47.txt. + Examples are: "en-US", "fr-CH", "es-MX". + + This field is a member of `oneof`_ ``_locale``. + message (str): + The localized error message in the above + locale. + + This field is a member of `oneof`_ ``_message``. + """ + + locale = proto.Field( + proto.STRING, + number=513150554, + optional=True, + ) + message = proto.Field( + proto.STRING, + number=418054151, + optional=True, + ) + + class LocationPolicy(proto.Message): r"""Configuration for location policy among multiple possible locations (e.g. preferences for zone selection among zones in a @@ -51301,21 +51616,32 @@ class LocationPolicyLocation(proto.Message): r""" Attributes: + constraints (google.cloud.compute_v1.types.LocationPolicyLocationConstraints): + Constraints that the caller requires on the + result distribution in this zone. + + This field is a member of `oneof`_ ``_constraints``. preference (str): - Preference for a given location. - Check the Preference enum for the list of - possible values. + Preference for a given location. Set to + either ALLOW or DENY. Check the Preference enum + for the list of possible values. This field is a member of `oneof`_ ``_preference``. """ class Preference(proto.Enum): - r"""Preference for a given location.""" + r"""Preference for a given location. 
Set to either ALLOW or DENY.""" UNDEFINED_PREFERENCE = 0 ALLOW = 62368553 DENY = 2094604 PREFERENCE_UNSPECIFIED = 496219571 + constraints = proto.Field( + proto.MESSAGE, + number=3909174, + optional=True, + message="LocationPolicyLocationConstraints", + ) preference = proto.Field( proto.STRING, number=150781147, @@ -51323,6 +51649,25 @@ class Preference(proto.Enum): ) +class LocationPolicyLocationConstraints(proto.Message): + r"""Per-zone constraints on location policy for this zone. + + Attributes: + max_count (int): + Maximum number of items that are allowed to + be placed in this zone. The value must be + non-negative. + + This field is a member of `oneof`_ ``_max_count``. + """ + + max_count = proto.Field( + proto.INT32, + number=287620724, + optional=True, + ) + + class LogConfig(proto.Message): r"""This is deprecated and has no effect. Do not use. @@ -52728,6 +53073,11 @@ class Network(proto.Message): google defined ULA prefix fd20::/20. . This field is a member of `oneof`_ ``_enable_ula_internal_ipv6``. + firewall_policy (str): + [Output Only] URL of the firewall policy the network is + associated with. + + This field is a member of `oneof`_ ``_firewall_policy``. gateway_i_pv4 (str): [Output Only] The gateway address for default routing out of the network, selected by GCP. @@ -52839,6 +53189,11 @@ class NetworkFirewallPolicyEnforcementOrder(proto.Enum): number=423757720, optional=True, ) + firewall_policy = proto.Field( + proto.STRING, + number=498173265, + optional=True, + ) gateway_i_pv4 = proto.Field( proto.STRING, number=178678877, @@ -53540,10 +53895,10 @@ class NetworkEndpointGroupAppEngine(proto.Message): request URLs "foo1-dot-appname.appspot.com/v1" and "foo1-dot-appname.appspot.com/v2" can be backed by the same Serverless NEG with URL mask - "-dot-appname.appspot.com/". The URL mask will - parse them to { service = "foo1", version = "v1" - } and { service = "foo1", version = "v2" } - respectively. + "-dot-appname.appspot.com/". + The URL mask will parse them to { service = + "foo1", version = "v1" } and { service = "foo1", + version = "v2" } respectively. This field is a member of `oneof`_ ``_url_mask``. version (str): @@ -53592,9 +53947,10 @@ class NetworkEndpointGroupCloudFunction(proto.Message): backend services. For example, request URLs " mydomain.com/function1" and "mydomain.com/function2" can be backed by the - same Serverless NEG with URL mask "/". The URL - mask will parse them to { function = "function1" - } and { function = "function2" } respectively. + same Serverless NEG with URL mask "/". + The URL mask will parse them to { function = + "function1" } and { function = "function2" } + respectively. This field is a member of `oneof`_ ``_url_mask``. """ @@ -63224,8 +63580,6 @@ class RegionTargetHttpsProxiesSetSslCertificatesRequest(proto.Message): ssl_certificates (Sequence[str]): New set of SslCertificate resources to associate with this TargetHttpsProxy resource. - Currently exactly one SslCertificate resource - must be specified. """ ssl_certificates = proto.RepeatedField( @@ -64016,7 +64370,12 @@ class RequestMirrorPolicy(proto.Message): Attributes: backend_service (str): The full or partial URL to the BackendService - resource being mirrored to. + resource being mirrored to. The backend service + configured for a mirroring policy must reference + backends that are of the same type as the + original backend service matched in the URL map. + Serverless NEG backends are not currently + supported as a mirrored backend service. 
This field is a member of `oneof`_ ``_backend_service``. """ @@ -68230,6 +68589,11 @@ class SavedDisk(proto.Message): r"""An instance-attached disk resource. Attributes: + architecture (str): + [Output Only] The architecture of the attached disk. Check + the Architecture enum for the list of possible values. + + This field is a member of `oneof`_ ``_architecture``. kind (str): [Output Only] Type of the resource. Always compute#savedDisk for attached disks. @@ -68257,6 +68621,13 @@ class SavedDisk(proto.Message): This field is a member of `oneof`_ ``_storage_bytes_status``. """ + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the attached disk.""" + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + class StorageBytesStatus(proto.Enum): r"""[Output Only] An indicator whether storageBytes is in a stable state or it is being adjusted as a result of shared storage reallocation. @@ -68268,6 +68639,11 @@ class StorageBytesStatus(proto.Enum): UPDATING = 494614342 UP_TO_DATE = 101306702 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) kind = proto.Field( proto.STRING, number=3292052, @@ -68341,7 +68717,7 @@ class State(proto.Enum): class Scheduling(proto.Message): - r"""Sets the scheduling options for an Instance. NextID: 21 + r"""Sets the scheduling options for an Instance. Attributes: automatic_restart (bool): @@ -70097,6 +70473,7 @@ class Status(proto.Enum): UNDEFINED_STATUS = 0 ACCEPTED = 246714279 CLOSED = 380163436 + NEEDS_ATTENTION = 344491452 PENDING = 35394935 REJECTED = 174130302 STATUS_UNSPECIFIED = 42133066 @@ -73796,6 +74173,12 @@ class Snapshot(proto.Message): information, read Creating persistent disk snapshots. Attributes: + architecture (str): + [Output Only] The architecture of the snapshot. Valid values + are ARM64 or X86_64. Check the Architecture enum for the + list of possible values. + + This field is a member of `oneof`_ ``_architecture``. auto_created (bool): [Output Only] Set to true if snapshots are automatically created by applying resource policy on the target disk. @@ -73813,6 +74196,11 @@ class Snapshot(proto.Message): it has a non-empty value. This field is a member of `oneof`_ ``_chain_name``. + creation_size_bytes (int): + [Output Only] Size in bytes of the snapshot at creation + time. + + This field is a member of `oneof`_ ``_creation_size_bytes``. creation_timestamp (str): [Output Only] Creation timestamp in RFC3339 text format. @@ -73910,6 +74298,12 @@ class Snapshot(proto.Message): snapshot later. This field is a member of `oneof`_ ``_snapshot_encryption_key``. + snapshot_type (str): + Indicates the type of the snapshot. + Check the SnapshotType enum for the list of + possible values. + + This field is a member of `oneof`_ ``_snapshot_type``. source_disk (str): The source disk used to create this snapshot. @@ -73927,6 +74321,16 @@ class Snapshot(proto.Message): of a given disk name. This field is a member of `oneof`_ ``_source_disk_id``. + source_snapshot_schedule_policy (str): + [Output Only] URL of the resource policy which created this + scheduled snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_schedule_policy``. + source_snapshot_schedule_policy_id (str): + [Output Only] ID of the resource policy which created this + scheduled snapshot. + + This field is a member of `oneof`_ ``_source_snapshot_schedule_policy_id``. status (str): [Output Only] The status of the snapshot. 
This can be CREATING, DELETING, FAILED, READY, or UPLOADING. Check the @@ -73954,6 +74358,21 @@ class Snapshot(proto.Message): snapshot (regional or multi-regional). """ + class Architecture(proto.Enum): + r"""[Output Only] The architecture of the snapshot. Valid values are + ARM64 or X86_64. + """ + UNDEFINED_ARCHITECTURE = 0 + ARCHITECTURE_UNSPECIFIED = 394750507 + ARM64 = 62547450 + X86_64 = 425300551 + + class SnapshotType(proto.Enum): + r"""Indicates the type of the snapshot.""" + UNDEFINED_SNAPSHOT_TYPE = 0 + ARCHIVE = 506752162 + STANDARD = 484642493 + class Status(proto.Enum): r"""[Output Only] The status of the snapshot. This can be CREATING, DELETING, FAILED, READY, or UPLOADING. @@ -73976,6 +74395,11 @@ class StorageBytesStatus(proto.Enum): UPDATING = 494614342 UP_TO_DATE = 101306702 + architecture = proto.Field( + proto.STRING, + number=302803283, + optional=True, + ) auto_created = proto.Field( proto.BOOL, number=463922264, @@ -73986,6 +74410,11 @@ class StorageBytesStatus(proto.Enum): number=68644169, optional=True, ) + creation_size_bytes = proto.Field( + proto.INT64, + number=125400077, + optional=True, + ) creation_timestamp = proto.Field( proto.STRING, number=30525366, @@ -74060,6 +74489,11 @@ class StorageBytesStatus(proto.Enum): optional=True, message="CustomerEncryptionKey", ) + snapshot_type = proto.Field( + proto.STRING, + number=124349653, + optional=True, + ) source_disk = proto.Field( proto.STRING, number=451753793, @@ -74076,6 +74510,16 @@ class StorageBytesStatus(proto.Enum): number=454190809, optional=True, ) + source_snapshot_schedule_policy = proto.Field( + proto.STRING, + number=235756291, + optional=True, + ) + source_snapshot_schedule_policy_id = proto.Field( + proto.STRING, + number=70489047, + optional=True, + ) status = proto.Field( proto.STRING, number=181260274, @@ -74262,6 +74706,14 @@ class SourceInstanceProperties(proto.Message): A list of guest accelerator cards' type and count to use for instances created from this machine image. + key_revocation_action_type (str): + KeyRevocationActionType of the instance. + Supported options are "STOP" and "NONE". The + default value is "NONE" if it is not specified. + Check the KeyRevocationActionType enum for the + list of possible values. + + This field is a member of `oneof`_ ``_key_revocation_action_type``. labels (Mapping[str, str]): Labels to apply to instances that are created from this machine image. @@ -74315,6 +74767,16 @@ class SourceInstanceProperties(proto.Message): This field is a member of `oneof`_ ``_tags``. """ + class KeyRevocationActionType(proto.Enum): + r"""KeyRevocationActionType of the instance. Supported options + are "STOP" and "NONE". The default value is "NONE" if it is not + specified. + """ + UNDEFINED_KEY_REVOCATION_ACTION_TYPE = 0 + KEY_REVOCATION_ACTION_TYPE_UNSPECIFIED = 467110106 + NONE = 2402104 + STOP = 2555906 + can_ip_forward = proto.Field( proto.BOOL, number=467731324, @@ -74340,6 +74802,11 @@ class SourceInstanceProperties(proto.Message): number=463595119, message="AcceleratorConfig", ) + key_revocation_action_type = proto.Field( + proto.STRING, + number=235941474, + optional=True, + ) labels = proto.MapField( proto.STRING, proto.STRING, @@ -77404,9 +77871,10 @@ class TargetInstance(proto.Message): This field is a member of `oneof`_ ``_name``. nat_policy (str): - NAT option controlling how IPs are NAT'ed to the instance. - Currently only NO_NAT (default value) is supported. Check - the NatPolicy enum for the list of possible values. + Must have a value of NO_NAT. 
Protocol forwarding delivers + packets while preserving the destination IP address of the + forwarding rule referencing the target instance. Check the + NatPolicy enum for the list of possible values. This field is a member of `oneof`_ ``_nat_policy``. network (str): @@ -77430,8 +77898,9 @@ class TargetInstance(proto.Message): """ class NatPolicy(proto.Enum): - r"""NAT option controlling how IPs are NAT'ed to the instance. Currently - only NO_NAT (default value) is supported. + r"""Must have a value of NO_NAT. Protocol forwarding delivers packets + while preserving the destination IP address of the forwarding rule + referencing the target instance. """ UNDEFINED_NAT_POLICY = 0 NO_NAT = 161455491 @@ -82217,6 +82686,9 @@ class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): field nat_ip_port_ranges. This field is a member of `oneof`_ ``_num_total_nat_ports``. + rule_mappings (Sequence[google.cloud.compute_v1.types.VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings]): + Information about mappings provided by rules + in this NAT. source_alias_ip_range (str): Alias IP range for this interface endpoint. It will be a private (RFC 1918) IP range. @@ -82247,6 +82719,11 @@ class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): number=299904384, optional=True, ) + rule_mappings = proto.RepeatedField( + proto.MESSAGE, + number=486192968, + message="VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings", + ) source_alias_ip_range = proto.Field( proto.STRING, number=440340952, @@ -82259,6 +82736,63 @@ class VmEndpointNatMappingsInterfaceNatMappings(proto.Message): ) +class VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings(proto.Message): + r"""Contains information of NAT Mappings provided by a NAT Rule. + + Attributes: + drain_nat_ip_port_ranges (Sequence[str]): + List of all drain IP:port-range mappings assigned to this + interface by this rule. These ranges are inclusive, that is, + both the first and the last ports can be used for NAT. + Example: ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + nat_ip_port_ranges (Sequence[str]): + A list of all IP:port-range mappings assigned to this + interface by this rule. These ranges are inclusive, that is, + both the first and the last ports can be used for NAT. + Example: ["2.2.2.2:12345-12355", "1.1.1.1:2234-2234"]. + num_total_drain_nat_ports (int): + Total number of drain ports across all NAT IPs allocated to + this interface by this rule. It equals the aggregated port + number in the field drain_nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_drain_nat_ports``. + num_total_nat_ports (int): + Total number of ports across all NAT IPs allocated to this + interface by this rule. It equals the aggregated port number + in the field nat_ip_port_ranges. + + This field is a member of `oneof`_ ``_num_total_nat_ports``. + rule_number (int): + Rule number of the NAT Rule. + + This field is a member of `oneof`_ ``_rule_number``. + """ + + drain_nat_ip_port_ranges = proto.RepeatedField( + proto.STRING, + number=395440577, + ) + nat_ip_port_ranges = proto.RepeatedField( + proto.STRING, + number=531830810, + ) + num_total_drain_nat_ports = proto.Field( + proto.INT32, + number=335532793, + optional=True, + ) + num_total_nat_ports = proto.Field( + proto.INT32, + number=299904384, + optional=True, + ) + rule_number = proto.Field( + proto.INT32, + number=535211500, + optional=True, + ) + + class VmEndpointNatMappingsList(proto.Message): r"""Contains a list of VmEndpointNatMappings. 
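For the per-rule NAT mappings introduced just above (VmEndpointNatMappingsInterfaceNatMappingsNatRuleMappings), a rough usage sketch follows. It is illustrative only: the project, region, and router values are placeholders, and it assumes the mappings are retrieved through RoutersClient.get_nat_mapping_info as in earlier revisions of the client.

from google.cloud import compute_v1

client = compute_v1.RoutersClient()
pager = client.get_nat_mapping_info(
    project="my-project",       # placeholder
    region="us-central1",       # placeholder
    router="my-cloud-router",   # placeholder
)
for vm_mapping in pager:  # each item is a VmEndpointNatMappings
    for iface in vm_mapping.interface_nat_mappings:
        # rule_mappings is the new repeated NatRuleMappings field.
        for rule in iface.rule_mappings:
            print(
                vm_mapping.instance_name,
                "rule", rule.rule_number,
                "ports", list(rule.nat_ip_port_ranges),
                "draining", list(rule.drain_nat_ip_port_ranges),
            )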
@@ -83554,6 +84088,7 @@ class Code(proto.Enum): EXTERNAL_API_WARNING = 175546307 FIELD_VALUE_OVERRIDEN = 329669423 INJECTED_KERNELS_DEPRECATED = 417377419 + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB = 401542606 LARGE_DEPLOYMENT_WARNING = 481440678 MISSING_TYPE_DEPENDENCY = 344505463 NEXT_HOP_ADDRESS_NOT_ASSIGNED = 324964999 @@ -83626,6 +84161,7 @@ class Code(proto.Enum): EXTERNAL_API_WARNING = 175546307 FIELD_VALUE_OVERRIDEN = 329669423 INJECTED_KERNELS_DEPRECATED = 417377419 + INVALID_HEALTH_CHECK_FOR_DYNAMIC_WIEGHTED_LB = 401542606 LARGE_DEPLOYMENT_WARNING = 481440678 MISSING_TYPE_DEPENDENCY = 344505463 NEXT_HOP_ADDRESS_NOT_ASSIGNED = 324964999 diff --git a/owlbot.py b/owlbot.py index 1bfb8b854..feaa46890 100644 --- a/owlbot.py +++ b/owlbot.py @@ -42,6 +42,8 @@ # Work around formatting issues with docstrings s.replace("google/cloud/**/types/compute.py", """\"IT_\"""", """`IT_`""") s.replace("google/cloud/**/types/compute.py", """\"NS_\"""", """`NS_`""") +s.replace("google/cloud/**/types/compute.py", """\/\[a-zA-Z0-9-_\]\/""", """`[a-zA-Z0-9-_]`""") +s.replace("google/cloud/**/types/compute.py", """\/\[A-Z0-9_\]\+\/""", """`[A-Z0-9_]+`""") # ---------------------------------------------------------------------------- # Add templated files diff --git a/tests/unit/gapic/compute_v1/test_disks.py b/tests/unit/gapic/compute_v1/test_disks.py index b12ae7c4c..d70b7ad3e 100644 --- a/tests/unit/gapic/compute_v1/test_disks.py +++ b/tests/unit/gapic/compute_v1/test_disks.py @@ -1545,8 +1545,10 @@ def test_create_snapshot_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1568,9 +1570,12 @@ def test_create_snapshot_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1812,8 +1817,10 @@ def test_create_snapshot_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1835,9 +1842,12 @@ def test_create_snapshot_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1876,7 
+1886,7 @@ def test_create_snapshot_rest_flattened(): project="project_value", zone="zone_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -1915,7 +1925,7 @@ def test_create_snapshot_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) @@ -1941,8 +1951,10 @@ def test_create_snapshot_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1964,9 +1976,12 @@ def test_create_snapshot_unary_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -2186,8 +2201,10 @@ def test_create_snapshot_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -2209,9 +2226,12 @@ def test_create_snapshot_unary_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -2250,7 +2270,7 @@ def test_create_snapshot_unary_rest_flattened(): project="project_value", zone="zone_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -2289,7 +2309,7 @@ def test_create_snapshot_unary_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) @@ -2930,6 +2950,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Disk( + architecture="architecture_value", creation_timestamp="creation_timestamp_value", description="description_value", id=205, @@ -2973,6 +2994,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Disk) + assert response.architecture == "architecture_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.id == 205 @@ -3541,6 +3563,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3813,6 +3836,7 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3888,7 +3912,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", zone="zone_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -3926,7 +3950,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertDiskRequest(), project="project_value", zone="zone_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) @@ -3952,6 +3976,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -4202,6 +4227,7 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -4277,7 +4303,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", zone="zone_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -4315,7 +4341,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): compute.InsertDiskRequest(), project="project_value", zone="zone_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) diff --git a/tests/unit/gapic/compute_v1/test_images.py b/tests/unit/gapic/compute_v1/test_images.py index c0269bcd5..7a531ca3e 100644 --- a/tests/unit/gapic/compute_v1/test_images.py +++ b/tests/unit/gapic/compute_v1/test_images.py @@ -1796,6 +1796,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Image( + architecture="architecture_value", archive_size_bytes=1922, creation_timestamp="creation_timestamp_value", description="description_value", @@ -1830,6 +1831,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Image) + assert response.architecture == "architecture_value" assert response.archive_size_bytes == 1922 assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -2103,6 +2105,7 @@ def test_get_from_family_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Image( + architecture="architecture_value", archive_size_bytes=1922, creation_timestamp="creation_timestamp_value", description="description_value", @@ -2137,6 +2140,7 @@ def test_get_from_family_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Image) + assert response.architecture == "architecture_value" assert response.archive_size_bytes == 1922 assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" @@ -2680,6 +2684,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -2957,6 +2962,7 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -3041,7 +3047,7 @@ def test_insert_rest_flattened(): # get truthy value for each flattened field mock_args = dict( project="project_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -3077,7 +3083,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): client.insert( compute.InsertImageRequest(), project="project_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) @@ -3103,6 +3109,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -3358,6 +3365,7 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -3442,7 +3450,7 @@ def test_insert_unary_rest_flattened(): # get truthy value for each flattened field mock_args = dict( project="project_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -3478,7 +3486,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): 
client.insert_unary( compute.InsertImageRequest(), project="project_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) @@ -3836,6 +3844,7 @@ def test_patch_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -4108,6 +4117,7 @@ def test_patch_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -4193,7 +4203,7 @@ def test_patch_rest_flattened(): mock_args = dict( project="project_value", image="image_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -4231,7 +4241,7 @@ def test_patch_rest_flattened_error(transport: str = "rest"): compute.PatchImageRequest(), project="project_value", image="image_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) @@ -4257,6 +4267,7 @@ def test_patch_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -4507,6 +4518,7 @@ def test_patch_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "image": "sample2"} request_init["image_resource"] = { + "architecture": "architecture_value", "archive_size_bytes": 1922, "creation_timestamp": "creation_timestamp_value", "deprecated": { @@ -4592,7 +4604,7 @@ def test_patch_unary_rest_flattened(): mock_args = dict( project="project_value", image="image_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -4630,7 +4642,7 @@ def test_patch_unary_rest_flattened_error(transport: str = "rest"): compute.PatchImageRequest(), project="project_value", image="image_value", - image_resource=compute.Image(archive_size_bytes=1922), + image_resource=compute.Image(architecture="architecture_value"), ) diff --git a/tests/unit/gapic/compute_v1/test_instance_templates.py b/tests/unit/gapic/compute_v1/test_instance_templates.py index d66d6e6d1..790c12747 100644 --- a/tests/unit/gapic/compute_v1/test_instance_templates.py +++ b/tests/unit/gapic/compute_v1/test_instance_templates.py @@ -1765,6 +1765,7 @@ def test_insert_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -1779,6 +1780,7 @@ def test_insert_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -1818,6 +1820,7 @@ def test_insert_rest(request_type): "accelerator_type": "accelerator_type_value", } ], 
+ "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -2152,6 +2155,7 @@ def test_insert_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2166,6 +2170,7 @@ def test_insert_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2205,6 +2210,7 @@ def test_insert_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -2419,6 +2425,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2433,6 +2440,7 @@ def test_insert_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2472,6 +2480,7 @@ def test_insert_unary_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -2784,6 +2793,7 @@ def test_insert_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2798,6 +2808,7 @@ def test_insert_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2837,6 +2848,7 @@ def test_insert_unary_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { diff --git a/tests/unit/gapic/compute_v1/test_instances.py b/tests/unit/gapic/compute_v1/test_instances.py index d74a9bf1c..8083db61c 100644 --- a/tests/unit/gapic/compute_v1/test_instances.py +++ b/tests/unit/gapic/compute_v1/test_instances.py @@ -2288,6 +2288,7 @@ def test_attach_disk_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["attached_disk_resource"] = { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2302,6 +2303,7 @@ def test_attach_disk_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2568,6 +2570,7 @@ def test_attach_disk_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["attached_disk_resource"] = { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": 
"device_name_value", @@ -2582,6 +2585,7 @@ def test_attach_disk_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2649,7 +2653,9 @@ def test_attach_disk_rest_flattened(): project="project_value", zone="zone_value", instance="instance_value", - attached_disk_resource=compute.AttachedDisk(auto_delete=True), + attached_disk_resource=compute.AttachedDisk( + architecture="architecture_value" + ), ) mock_args.update(sample_request) @@ -2688,7 +2694,9 @@ def test_attach_disk_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", instance="instance_value", - attached_disk_resource=compute.AttachedDisk(auto_delete=True), + attached_disk_resource=compute.AttachedDisk( + architecture="architecture_value" + ), ) @@ -2714,6 +2722,7 @@ def test_attach_disk_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["attached_disk_resource"] = { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2728,6 +2737,7 @@ def test_attach_disk_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2972,6 +2982,7 @@ def test_attach_disk_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "zone": "sample2", "instance": "sample3"} request_init["attached_disk_resource"] = { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2986,6 +2997,7 @@ def test_attach_disk_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -3053,7 +3065,9 @@ def test_attach_disk_unary_rest_flattened(): project="project_value", zone="zone_value", instance="instance_value", - attached_disk_resource=compute.AttachedDisk(auto_delete=True), + attached_disk_resource=compute.AttachedDisk( + architecture="architecture_value" + ), ) mock_args.update(sample_request) @@ -3092,7 +3106,9 @@ def test_attach_disk_unary_rest_flattened_error(transport: str = "rest"): project="project_value", zone="zone_value", instance="instance_value", - attached_disk_resource=compute.AttachedDisk(auto_delete=True), + attached_disk_resource=compute.AttachedDisk( + architecture="architecture_value" + ), ) @@ -3130,6 +3146,7 @@ def test_bulk_insert_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -3144,6 +3161,7 @@ def test_bulk_insert_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -3183,6 +3201,7 @@ def test_bulk_insert_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, 
"machine_type": "machine_type_value", "metadata": { @@ -3509,6 +3528,7 @@ def test_bulk_insert_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -3523,6 +3543,7 @@ def test_bulk_insert_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -3562,6 +3583,7 @@ def test_bulk_insert_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -3767,6 +3789,7 @@ def test_bulk_insert_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -3781,6 +3804,7 @@ def test_bulk_insert_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -3820,6 +3844,7 @@ def test_bulk_insert_unary_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -4124,6 +4149,7 @@ def test_bulk_insert_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -4138,6 +4164,7 @@ def test_bulk_insert_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -4177,6 +4204,7 @@ def test_bulk_insert_unary_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -6377,6 +6405,7 @@ def test_get_rest(request_type): fingerprint="fingerprint_value", hostname="hostname_value", id=205, + key_revocation_action_type="key_revocation_action_type_value", kind="kind_value", label_fingerprint="label_fingerprint_value", last_start_timestamp="last_start_timestamp_value", @@ -6414,6 +6443,7 @@ def test_get_rest(request_type): assert response.fingerprint == "fingerprint_value" assert response.hostname == "hostname_value" assert response.id == 205 + assert response.key_revocation_action_type == "key_revocation_action_type_value" assert response.kind == "kind_value" assert response.label_fingerprint == "label_fingerprint_value" assert response.last_start_timestamp == "last_start_timestamp_value" @@ -8450,6 +8480,7 @@ def test_insert_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -8464,6 +8495,7 @@ def test_insert_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ 
-8504,6 +8536,7 @@ def test_insert_rest(request_type): ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -8847,6 +8880,7 @@ def test_insert_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -8861,6 +8895,7 @@ def test_insert_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -8901,6 +8936,7 @@ def test_insert_rest_bad_request( ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -9117,6 +9153,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -9131,6 +9168,7 @@ def test_insert_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -9171,6 +9209,7 @@ def test_insert_unary_rest(request_type): ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -9492,6 +9531,7 @@ def test_insert_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -9506,6 +9546,7 @@ def test_insert_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -9546,6 +9587,7 @@ def test_insert_unary_rest_bad_request( ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -23844,6 +23886,7 @@ def test_update_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -23858,6 +23901,7 @@ def test_update_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -23898,6 +23942,7 @@ def test_update_rest(request_type): ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -24246,6 +24291,7 @@ def test_update_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -24260,6 +24306,7 @@ def test_update_rest_bad_request( 
"guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -24300,6 +24347,7 @@ def test_update_rest_bad_request( ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -24522,6 +24570,7 @@ def test_update_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -24536,6 +24585,7 @@ def test_update_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -24576,6 +24626,7 @@ def test_update_unary_rest(request_type): ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, @@ -24902,6 +24953,7 @@ def test_update_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -24916,6 +24968,7 @@ def test_update_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -24956,6 +25009,7 @@ def test_update_unary_rest_bad_request( ], "hostname": "hostname_value", "id": 205, + "key_revocation_action_type": "key_revocation_action_type_value", "kind": "kind_value", "label_fingerprint": "label_fingerprint_value", "labels": {}, diff --git a/tests/unit/gapic/compute_v1/test_machine_images.py b/tests/unit/gapic/compute_v1/test_machine_images.py index 48a3e4712..c33e74788 100644 --- a/tests/unit/gapic/compute_v1/test_machine_images.py +++ b/tests/unit/gapic/compute_v1/test_machine_images.py @@ -1750,6 +1750,7 @@ def test_insert_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -1764,6 +1765,7 @@ def test_insert_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -1803,6 +1805,7 @@ def test_insert_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -1896,6 +1899,7 @@ def test_insert_rest(request_type): "satisfies_pzs": True, "saved_disks": [ { + "architecture": "architecture_value", "kind": "kind_value", "source_disk": "source_disk_value", "storage_bytes": 1403, @@ -1932,6 +1936,7 @@ def test_insert_rest(request_type): } ], "guest_accelerators": {}, + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": {}, @@ -2186,6 +2191,7 @@ def test_insert_rest_bad_request( "description": "description_value", "disks": [ { 
+ "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2200,6 +2206,7 @@ def test_insert_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2239,6 +2246,7 @@ def test_insert_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -2332,6 +2340,7 @@ def test_insert_rest_bad_request( "satisfies_pzs": True, "saved_disks": [ { + "architecture": "architecture_value", "kind": "kind_value", "source_disk": "source_disk_value", "storage_bytes": 1403, @@ -2368,6 +2377,7 @@ def test_insert_rest_bad_request( } ], "guest_accelerators": {}, + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": {}, @@ -2494,6 +2504,7 @@ def test_insert_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2508,6 +2519,7 @@ def test_insert_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2547,6 +2559,7 @@ def test_insert_unary_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -2640,6 +2653,7 @@ def test_insert_unary_rest(request_type): "satisfies_pzs": True, "saved_disks": [ { + "architecture": "architecture_value", "kind": "kind_value", "source_disk": "source_disk_value", "storage_bytes": 1403, @@ -2676,6 +2690,7 @@ def test_insert_unary_rest(request_type): } ], "guest_accelerators": {}, + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": {}, @@ -2910,6 +2925,7 @@ def test_insert_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -2924,6 +2940,7 @@ def test_insert_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -2963,6 +2980,7 @@ def test_insert_unary_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -3056,6 +3074,7 @@ def test_insert_unary_rest_bad_request( "satisfies_pzs": True, "saved_disks": [ { + "architecture": "architecture_value", "kind": "kind_value", "source_disk": "source_disk_value", "storage_bytes": 1403, @@ -3092,6 +3111,7 @@ def test_insert_unary_rest_bad_request( } ], "guest_accelerators": {}, + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": {}, diff --git a/tests/unit/gapic/compute_v1/test_networks.py 
b/tests/unit/gapic/compute_v1/test_networks.py index 2c8f93eb5..788bf912d 100644 --- a/tests/unit/gapic/compute_v1/test_networks.py +++ b/tests/unit/gapic/compute_v1/test_networks.py @@ -1854,6 +1854,7 @@ def test_get_rest(request_type): creation_timestamp="creation_timestamp_value", description="description_value", enable_ula_internal_ipv6=True, + firewall_policy="firewall_policy_value", gateway_i_pv4="gateway_i_pv4_value", id=205, internal_ipv6_range="internal_ipv6_range_value", @@ -1881,6 +1882,7 @@ def test_get_rest(request_type): assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.enable_ula_internal_ipv6 is True + assert response.firewall_policy == "firewall_policy_value" assert response.gateway_i_pv4 == "gateway_i_pv4_value" assert response.id == 205 assert response.internal_ipv6_range == "internal_ipv6_range_value" @@ -2419,6 +2421,7 @@ def test_insert_rest(request_type): "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -2666,6 +2669,7 @@ def test_insert_rest_bad_request( "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -2792,6 +2796,7 @@ def test_insert_unary_rest(request_type): "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -3017,6 +3022,7 @@ def test_insert_unary_rest_bad_request( "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -3831,6 +3837,7 @@ def test_patch_rest(request_type): "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -4083,6 +4090,7 @@ def test_patch_rest_bad_request( "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -4212,6 +4220,7 @@ def test_patch_unary_rest(request_type): "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", @@ -4442,6 +4451,7 @@ def test_patch_unary_rest_bad_request( "creation_timestamp": "creation_timestamp_value", "description": "description_value", "enable_ula_internal_ipv6": True, + "firewall_policy": "firewall_policy_value", "gateway_i_pv4": "gateway_i_pv4_value", "id": 205, "internal_ipv6_range": "internal_ipv6_range_value", diff --git 
a/tests/unit/gapic/compute_v1/test_region_disks.py b/tests/unit/gapic/compute_v1/test_region_disks.py index 63267b1df..35295080c 100644 --- a/tests/unit/gapic/compute_v1/test_region_disks.py +++ b/tests/unit/gapic/compute_v1/test_region_disks.py @@ -1206,8 +1206,10 @@ def test_create_snapshot_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1229,9 +1231,12 @@ def test_create_snapshot_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1465,8 +1470,10 @@ def test_create_snapshot_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1488,9 +1495,12 @@ def test_create_snapshot_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1529,7 +1539,7 @@ def test_create_snapshot_rest_flattened(): project="project_value", region="region_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -1568,7 +1578,7 @@ def test_create_snapshot_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) @@ -1594,8 +1604,10 @@ def test_create_snapshot_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1617,9 +1629,12 @@ def test_create_snapshot_unary_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": 
"snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1831,8 +1846,10 @@ def test_create_snapshot_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2", "disk": "sample3"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1854,9 +1871,12 @@ def test_create_snapshot_unary_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1895,7 +1915,7 @@ def test_create_snapshot_unary_rest_flattened(): project="project_value", region="region_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -1934,7 +1954,7 @@ def test_create_snapshot_unary_rest_flattened_error(transport: str = "rest"): project="project_value", region="region_value", disk="disk_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) @@ -2581,6 +2601,7 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. return_value = compute.Disk( + architecture="architecture_value", creation_timestamp="creation_timestamp_value", description="description_value", id=205, @@ -2624,6 +2645,7 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. 
assert isinstance(response, compute.Disk) + assert response.architecture == "architecture_value" assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.id == 205 @@ -3196,6 +3218,7 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3470,6 +3493,7 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3545,7 +3569,7 @@ def test_insert_rest_flattened(): mock_args = dict( project="project_value", region="region_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -3583,7 +3607,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): compute.InsertRegionDiskRequest(), project="project_value", region="region_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) @@ -3609,6 +3633,7 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3863,6 +3888,7 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1", "region": "sample2"} request_init["disk_resource"] = { + "architecture": "architecture_value", "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_encryption_key": { @@ -3938,7 +3964,7 @@ def test_insert_unary_rest_flattened(): mock_args = dict( project="project_value", region="region_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -3976,7 +4002,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): compute.InsertRegionDiskRequest(), project="project_value", region="region_value", - disk_resource=compute.Disk(creation_timestamp="creation_timestamp_value"), + disk_resource=compute.Disk(architecture="architecture_value"), ) diff --git a/tests/unit/gapic/compute_v1/test_region_instances.py b/tests/unit/gapic/compute_v1/test_region_instances.py index d34c359a7..66ec042e1 100644 --- a/tests/unit/gapic/compute_v1/test_region_instances.py +++ b/tests/unit/gapic/compute_v1/test_region_instances.py @@ -594,6 +594,7 @@ def test_bulk_insert_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -608,6 +609,7 @@ def test_bulk_insert_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": 
"architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -647,6 +649,7 @@ def test_bulk_insert_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -975,6 +978,7 @@ def test_bulk_insert_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -989,6 +993,7 @@ def test_bulk_insert_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -1028,6 +1033,7 @@ def test_bulk_insert_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -1233,6 +1239,7 @@ def test_bulk_insert_unary_rest(request_type): "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -1247,6 +1254,7 @@ def test_bulk_insert_unary_rest(request_type): "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -1286,6 +1294,7 @@ def test_bulk_insert_unary_rest(request_type): "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { @@ -1592,6 +1601,7 @@ def test_bulk_insert_unary_rest_bad_request( "description": "description_value", "disks": [ { + "architecture": "architecture_value", "auto_delete": True, "boot": True, "device_name": "device_name_value", @@ -1606,6 +1616,7 @@ def test_bulk_insert_unary_rest_bad_request( "guest_os_features": [{"type_": "type__value"}], "index": 536, "initialize_params": { + "architecture": "architecture_value", "description": "description_value", "disk_name": "disk_name_value", "disk_size_gb": 1261, @@ -1645,6 +1656,7 @@ def test_bulk_insert_unary_rest_bad_request( "accelerator_type": "accelerator_type_value", } ], + "key_revocation_action_type": "key_revocation_action_type_value", "labels": {}, "machine_type": "machine_type_value", "metadata": { diff --git a/tests/unit/gapic/compute_v1/test_snapshots.py b/tests/unit/gapic/compute_v1/test_snapshots.py index 45883952b..ea80a0c3c 100644 --- a/tests/unit/gapic/compute_v1/test_snapshots.py +++ b/tests/unit/gapic/compute_v1/test_snapshots.py @@ -1157,8 +1157,10 @@ def test_get_rest(request_type): with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
return_value = compute.Snapshot( + architecture="architecture_value", auto_created=True, chain_name="chain_name_value", + creation_size_bytes=2037, creation_timestamp="creation_timestamp_value", description="description_value", disk_size_gb=1261, @@ -1172,8 +1174,11 @@ def test_get_rest(request_type): name="name_value", satisfies_pzs=True, self_link="self_link_value", + snapshot_type="snapshot_type_value", source_disk="source_disk_value", source_disk_id="source_disk_id_value", + source_snapshot_schedule_policy="source_snapshot_schedule_policy_value", + source_snapshot_schedule_policy_id="source_snapshot_schedule_policy_id_value", status="status_value", storage_bytes=1403, storage_bytes_status="storage_bytes_status_value", @@ -1190,8 +1195,10 @@ def test_get_rest(request_type): # Establish that the response is the type that we expect. assert isinstance(response, compute.Snapshot) + assert response.architecture == "architecture_value" assert response.auto_created is True assert response.chain_name == "chain_name_value" + assert response.creation_size_bytes == 2037 assert response.creation_timestamp == "creation_timestamp_value" assert response.description == "description_value" assert response.disk_size_gb == 1261 @@ -1205,8 +1212,17 @@ def test_get_rest(request_type): assert response.name == "name_value" assert response.satisfies_pzs is True assert response.self_link == "self_link_value" + assert response.snapshot_type == "snapshot_type_value" assert response.source_disk == "source_disk_value" assert response.source_disk_id == "source_disk_id_value" + assert ( + response.source_snapshot_schedule_policy + == "source_snapshot_schedule_policy_value" + ) + assert ( + response.source_snapshot_schedule_policy_id + == "source_snapshot_schedule_policy_id_value" + ) assert response.status == "status_value" assert response.storage_bytes == 1403 assert response.storage_bytes_status == "storage_bytes_status_value" @@ -1730,8 +1746,10 @@ def test_insert_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1753,9 +1771,12 @@ def test_insert_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -1975,8 +1996,10 @@ def test_insert_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -1998,9 +2021,12 @@ def test_insert_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", 
"source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -2037,7 +2063,7 @@ def test_insert_rest_flattened(): # get truthy value for each flattened field mock_args = dict( project="project_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -2074,7 +2100,7 @@ def test_insert_rest_flattened_error(transport: str = "rest"): client.insert( compute.InsertSnapshotRequest(), project="project_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) @@ -2100,8 +2126,10 @@ def test_insert_unary_rest(request_type): # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -2123,9 +2151,12 @@ def test_insert_unary_rest(request_type): "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -2323,8 +2354,10 @@ def test_insert_unary_rest_bad_request( # send a request that will satisfy transcoding request_init = {"project": "sample1"} request_init["snapshot_resource"] = { + "architecture": "architecture_value", "auto_created": True, "chain_name": "chain_name_value", + "creation_size_bytes": 2037, "creation_timestamp": "creation_timestamp_value", "description": "description_value", "disk_size_gb": 1261, @@ -2346,9 +2379,12 @@ def test_insert_unary_rest_bad_request( "rsa_encrypted_key": "rsa_encrypted_key_value", "sha256": "sha256_value", }, + "snapshot_type": "snapshot_type_value", "source_disk": "source_disk_value", "source_disk_encryption_key": {}, "source_disk_id": "source_disk_id_value", + "source_snapshot_schedule_policy": "source_snapshot_schedule_policy_value", + "source_snapshot_schedule_policy_id": "source_snapshot_schedule_policy_id_value", "status": "status_value", "storage_bytes": 1403, "storage_bytes_status": "storage_bytes_status_value", @@ -2385,7 +2421,7 @@ def test_insert_unary_rest_flattened(): # get truthy value for each flattened field mock_args = dict( project="project_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), ) mock_args.update(sample_request) @@ -2422,7 +2458,7 @@ def test_insert_unary_rest_flattened_error(transport: str = "rest"): client.insert_unary( compute.InsertSnapshotRequest(), project="project_value", - snapshot_resource=compute.Snapshot(auto_created=True), + snapshot_resource=compute.Snapshot(architecture="architecture_value"), )