diff --git a/librarian.yaml b/librarian.yaml index e5006e85aa71..9bbc855bf06c 100644 --- a/librarian.yaml +++ b/librarian.yaml @@ -16,8 +16,8 @@ version: v0.10.1 repo: googleapis/google-cloud-python sources: googleapis: - commit: 2233f63baf69c2a481f30180045fcf036242781d - sha256: fe0d4bb6d640fa6e0b48aa828c833c458f6835b6643b664062a288995b244c3c + commit: 4ad1b6750926701f94ae8a88525395fd17b42cfe + sha256: 4a54e2829977dea29fb0dde856b182a009873c9f1296e4df3ccb35c62511137b release: ignored_changes: - .repo-metadata.json diff --git a/packages/google-analytics-admin/google/analytics/admin/__init__.py b/packages/google-analytics-admin/google/analytics/admin/__init__.py index c156d83585cd..36e2720d2277 100644 --- a/packages/google-analytics-admin/google/analytics/admin/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin/__init__.py @@ -144,6 +144,7 @@ GetSKAdNetworkConversionValueSchemaRequest, GetSubpropertyEventFilterRequest, GetSubpropertySyncConfigRequest, + GetUserProvidedDataSettingsRequest, ListAccessBindingsRequest, ListAccessBindingsResponse, ListAccountsRequest, @@ -320,6 +321,7 @@ ServiceLevel, SKAdNetworkConversionValueSchema, SubpropertySyncConfig, + UserProvidedDataSettings, ) from google.analytics.admin_v1alpha.types.subproperty_event_filter import ( SubpropertyEventFilter, @@ -449,6 +451,7 @@ "GetSKAdNetworkConversionValueSchemaRequest", "GetSubpropertyEventFilterRequest", "GetSubpropertySyncConfigRequest", + "GetUserProvidedDataSettingsRequest", "ListAccessBindingsRequest", "ListAccessBindingsResponse", "ListAccountsRequest", @@ -604,6 +607,7 @@ "SearchAds360Link", "SKAdNetworkConversionValueSchema", "SubpropertySyncConfig", + "UserProvidedDataSettings", "ActionType", "ActorType", "ChangeHistoryResourceType", diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py index 52535fd10219..2af82175ac14 100644 --- 
a/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/__init__.py @@ -153,6 +153,7 @@ GetSKAdNetworkConversionValueSchemaRequest, GetSubpropertyEventFilterRequest, GetSubpropertySyncConfigRequest, + GetUserProvidedDataSettingsRequest, ListAccessBindingsRequest, ListAccessBindingsResponse, ListAccountsRequest, @@ -329,6 +330,7 @@ ServiceLevel, SKAdNetworkConversionValueSchema, SubpropertySyncConfig, + UserProvidedDataSettings, ) from .types.subproperty_event_filter import ( SubpropertyEventFilter, @@ -597,6 +599,7 @@ def _get_version(dependency_name): "GetSearchAds360LinkRequest", "GetSubpropertyEventFilterRequest", "GetSubpropertySyncConfigRequest", + "GetUserProvidedDataSettingsRequest", "GlobalSiteTag", "GoogleAdsLink", "GoogleSignalsConsent", @@ -723,4 +726,5 @@ def _get_version(dependency_name): "UpdateSearchAds360LinkRequest", "UpdateSubpropertyEventFilterRequest", "UpdateSubpropertySyncConfigRequest", + "UserProvidedDataSettings", ) diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json index d09baf88ee44..85e5a17017d0 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/gapic_metadata.json @@ -470,6 +470,11 @@ "get_subproperty_sync_config" ] }, + "GetUserProvidedDataSettings": { + "methods": [ + "get_user_provided_data_settings" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" @@ -1245,6 +1250,11 @@ "get_subproperty_sync_config" ] }, + "GetUserProvidedDataSettings": { + "methods": [ + "get_user_provided_data_settings" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" @@ -2020,6 +2030,11 @@ "get_subproperty_sync_config" ] }, + "GetUserProvidedDataSettings": { + "methods": [ + 
"get_user_provided_data_settings" + ] + }, "ListAccessBindings": { "methods": [ "list_access_bindings" diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py index d54c7760ccb1..571f6ec4fbc5 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/async_client.py @@ -288,6 +288,12 @@ class AnalyticsAdminServiceAsyncClient: parse_subproperty_sync_config_path = staticmethod( AnalyticsAdminServiceClient.parse_subproperty_sync_config_path ) + user_provided_data_settings_path = staticmethod( + AnalyticsAdminServiceClient.user_provided_data_settings_path + ) + parse_user_provided_data_settings_path = staticmethod( + AnalyticsAdminServiceClient.parse_user_provided_data_settings_path + ) common_billing_account_path = staticmethod( AnalyticsAdminServiceClient.common_billing_account_path ) @@ -14672,8 +14678,8 @@ async def get_reporting_identity_settings( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.ReportingIdentitySettings: - r"""Returns the singleton data retention settings for - this property. + r"""Returns the reporting identity settings for this + property. Args: request (Optional[Union[google.analytics.admin_v1alpha.types.GetReportingIdentitySettingsRequest, dict]]): @@ -14753,6 +14759,97 @@ async def get_reporting_identity_settings( # Done; return the response. 
return response + async def get_user_provided_data_settings( + self, + request: Optional[ + Union[analytics_admin.GetUserProvidedDataSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.UserProvidedDataSettings: + r"""Looks up settings related to user-provided data for a + property. + + Args: + request (Optional[Union[google.analytics.admin_v1alpha.types.GetUserProvidedDataSettingsRequest, dict]]): + The request object. Request message for + GetUserProvidedDataSettings RPC + name (:class:`str`): + Required. The name of the user + provided data settings to retrieve. + Format: + properties/{property}/userProvidedDataSettings + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.analytics.admin_v1alpha.types.UserProvidedDataSettings: + Configuration for user-provided data + collection. This is a singleton resource + for a Google Analytics property. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetUserProvidedDataSettingsRequest): + request = analytics_admin.GetUserProvidedDataSettingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_user_provided_data_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def __aenter__(self) -> "AnalyticsAdminServiceAsyncClient": return self diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py index f9da1f4e1341..526b657fa9d3 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/client.py @@ -944,6 +944,21 @@ def parse_subproperty_sync_config_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def user_provided_data_settings_path( + property: str, + ) -> str: + """Returns a fully-qualified user_provided_data_settings string.""" + return "properties/{property}/userProvidedDataSettings".format( + property=property, + ) + + @staticmethod + def parse_user_provided_data_settings_path(path: str) -> Dict[str, str]: + """Parses a user_provided_data_settings path into its component segments.""" + m = re.match(r"^properties/(?P.+?)/userProvidedDataSettings$", path) + return m.groupdict() if m else {} + @staticmethod def common_billing_account_path( billing_account: str, @@ -15260,8 +15275,8 @@ def get_reporting_identity_settings( timeout: Union[float, object] = gapic_v1.method.DEFAULT, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> resources.ReportingIdentitySettings: - r"""Returns the singleton data retention settings for - this property. + r"""Returns the reporting identity settings for this + property. Args: request (Union[google.analytics.admin_v1alpha.types.GetReportingIdentitySettingsRequest, dict]): @@ -15340,6 +15355,96 @@ def get_reporting_identity_settings( # Done; return the response. 
return response + def get_user_provided_data_settings( + self, + request: Optional[ + Union[analytics_admin.GetUserProvidedDataSettingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.UserProvidedDataSettings: + r"""Looks up settings related to user-provided data for a + property. + + Args: + request (Union[google.analytics.admin_v1alpha.types.GetUserProvidedDataSettingsRequest, dict]): + The request object. Request message for + GetUserProvidedDataSettings RPC + name (str): + Required. The name of the user + provided data settings to retrieve. + Format: + properties/{property}/userProvidedDataSettings + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.analytics.admin_v1alpha.types.UserProvidedDataSettings: + Configuration for user-provided data + collection. This is a singleton resource + for a Google Analytics property. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, analytics_admin.GetUserProvidedDataSettingsRequest): + request = analytics_admin.GetUserProvidedDataSettingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_user_provided_data_settings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "AnalyticsAdminServiceClient": return self diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py index 18f5d3be868b..be70c5f831d6 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/base.py @@ -934,6 +934,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_user_provided_data_settings: gapic_v1.method.wrap_method( + self.get_user_provided_data_settings, + default_timeout=None, + client_info=client_info, + ), } def close(self): @@ -2568,6 +2573,18 @@ def get_reporting_identity_settings( ]: raise NotImplementedError() + @property + def get_user_provided_data_settings( + self, + ) -> Callable[ + [analytics_admin.GetUserProvidedDataSettingsRequest], + Union[ + resources.UserProvidedDataSettings, + Awaitable[resources.UserProvidedDataSettings], + ], + ]: + raise NotImplementedError() + @property def kind(self) -> str: raise NotImplementedError() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py index f387123f4af6..3cffac6e26fa 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc.py @@ -4890,8 +4890,8 @@ def get_reporting_identity_settings( r"""Return a callable for the get reporting identity settings method over 
gRPC. - Returns the singleton data retention settings for - this property. + Returns the reporting identity settings for this + property. Returns: Callable[[~.GetReportingIdentitySettingsRequest], @@ -4913,6 +4913,39 @@ def get_reporting_identity_settings( ) return self._stubs["get_reporting_identity_settings"] + @property + def get_user_provided_data_settings( + self, + ) -> Callable[ + [analytics_admin.GetUserProvidedDataSettingsRequest], + resources.UserProvidedDataSettings, + ]: + r"""Return a callable for the get user provided data + settings method over gRPC. + + Looks up settings related to user-provided data for a + property. + + Returns: + Callable[[~.GetUserProvidedDataSettingsRequest], + ~.UserProvidedDataSettings]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_user_provided_data_settings" not in self._stubs: + self._stubs["get_user_provided_data_settings"] = ( + self._logged_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetUserProvidedDataSettings", + request_serializer=analytics_admin.GetUserProvidedDataSettingsRequest.serialize, + response_deserializer=resources.UserProvidedDataSettings.deserialize, + ) + ) + return self._stubs["get_user_provided_data_settings"] + def close(self): self._logged_channel.close() diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py index ca63a0366a32..e144eb3ee828 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/grpc_asyncio.py @@ -5004,8 +5004,8 @@ def get_reporting_identity_settings( r"""Return a callable for the get reporting identity settings method over gRPC. - Returns the singleton data retention settings for - this property. + Returns the reporting identity settings for this + property. Returns: Callable[[~.GetReportingIdentitySettingsRequest], @@ -5027,6 +5027,39 @@ def get_reporting_identity_settings( ) return self._stubs["get_reporting_identity_settings"] + @property + def get_user_provided_data_settings( + self, + ) -> Callable[ + [analytics_admin.GetUserProvidedDataSettingsRequest], + Awaitable[resources.UserProvidedDataSettings], + ]: + r"""Return a callable for the get user provided data + settings method over gRPC. + + Looks up settings related to user-provided data for a + property. 
+ + Returns: + Callable[[~.GetUserProvidedDataSettingsRequest], + Awaitable[~.UserProvidedDataSettings]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_user_provided_data_settings" not in self._stubs: + self._stubs["get_user_provided_data_settings"] = ( + self._logged_channel.unary_unary( + "/google.analytics.admin.v1alpha.AnalyticsAdminService/GetUserProvidedDataSettings", + request_serializer=analytics_admin.GetUserProvidedDataSettingsRequest.serialize, + response_deserializer=resources.UserProvidedDataSettings.deserialize, + ) + ) + return self._stubs["get_user_provided_data_settings"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -5800,6 +5833,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_user_provided_data_settings: self._wrap_method( + self.get_user_provided_data_settings, + default_timeout=None, + client_info=client_info, + ), } def _wrap_method(self, func, *args, **kwargs): diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py index 74089bdca563..d681be603707 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest.py @@ -721,6 +721,14 @@ def post_get_subproperty_sync_config(self, response): logging.log(f"Received response: {response}") return response + def 
pre_get_user_provided_data_settings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_user_provided_data_settings(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_access_bindings(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -4888,6 +4896,57 @@ def post_get_subproperty_sync_config_with_metadata( """ return response, metadata + def pre_get_user_provided_data_settings( + self, + request: analytics_admin.GetUserProvidedDataSettingsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + analytics_admin.GetUserProvidedDataSettingsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_user_provided_data_settings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AnalyticsAdminService server. + """ + return request, metadata + + def post_get_user_provided_data_settings( + self, response: resources.UserProvidedDataSettings + ) -> resources.UserProvidedDataSettings: + """Post-rpc interceptor for get_user_provided_data_settings + + DEPRECATED. Please use the `post_get_user_provided_data_settings_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AnalyticsAdminService server but before + it is returned to user code. This `post_get_user_provided_data_settings` interceptor runs + before the `post_get_user_provided_data_settings_with_metadata` interceptor. 
+ """ + return response + + def post_get_user_provided_data_settings_with_metadata( + self, + response: resources.UserProvidedDataSettings, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + resources.UserProvidedDataSettings, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_user_provided_data_settings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AnalyticsAdminService server but before it is returned to user code. + + We recommend only using this `post_get_user_provided_data_settings_with_metadata` + interceptor in new development instead of the `post_get_user_provided_data_settings` interceptor. + When both interceptors are used, this `post_get_user_provided_data_settings_with_metadata` interceptor runs after the + `post_get_user_provided_data_settings` interceptor. The (possibly modified) response returned by + `post_get_user_provided_data_settings` will be passed to + `post_get_user_provided_data_settings_with_metadata`. 
+ """ + return response, metadata + def pre_list_access_bindings( self, request: analytics_admin.ListAccessBindingsRequest, @@ -21260,6 +21319,162 @@ def __call__( ) return resp + class _GetUserProvidedDataSettings( + _BaseAnalyticsAdminServiceRestTransport._BaseGetUserProvidedDataSettings, + AnalyticsAdminServiceRestStub, + ): + def __hash__(self): + return hash( + "AnalyticsAdminServiceRestTransport.GetUserProvidedDataSettings" + ) + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: analytics_admin.GetUserProvidedDataSettingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> resources.UserProvidedDataSettings: + r"""Call the get user provided data + settings method over HTTP. + + Args: + request (~.analytics_admin.GetUserProvidedDataSettingsRequest): + The request object. Request message for + GetUserProvidedDataSettings RPC + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.resources.UserProvidedDataSettings: + Configuration for user-provided data + collection. 
This is a singleton resource + for a Google Analytics property. + + """ + + http_options = _BaseAnalyticsAdminServiceRestTransport._BaseGetUserProvidedDataSettings._get_http_options() + + request, metadata = self._interceptor.pre_get_user_provided_data_settings( + request, metadata + ) + transcoded_request = _BaseAnalyticsAdminServiceRestTransport._BaseGetUserProvidedDataSettings._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAnalyticsAdminServiceRestTransport._BaseGetUserProvidedDataSettings._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.analytics.admin_v1alpha.AnalyticsAdminServiceClient.GetUserProvidedDataSettings", + extra={ + "serviceName": "google.analytics.admin.v1alpha.AnalyticsAdminService", + "rpcName": "GetUserProvidedDataSettings", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AnalyticsAdminServiceRestTransport._GetUserProvidedDataSettings._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = resources.UserProvidedDataSettings() + pb_resp = resources.UserProvidedDataSettings.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_user_provided_data_settings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = ( + self._interceptor.post_get_user_provided_data_settings_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = resources.UserProvidedDataSettings.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.analytics.admin_v1alpha.AnalyticsAdminServiceClient.get_user_provided_data_settings", + extra={ + "serviceName": "google.analytics.admin.v1alpha.AnalyticsAdminService", + "rpcName": "GetUserProvidedDataSettings", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _ListAccessBindings( _BaseAnalyticsAdminServiceRestTransport._BaseListAccessBindings, AnalyticsAdminServiceRestStub, @@ -31850,6 +32065,19 @@ def get_subproperty_sync_config( self._session, self._host, self._interceptor ) # type: ignore + @property + def get_user_provided_data_settings( + self, + ) -> Callable[ + [analytics_admin.GetUserProvidedDataSettingsRequest], + resources.UserProvidedDataSettings, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetUserProvidedDataSettings( + self._session, self._host, self._interceptor + ) # type: ignore + @property def list_access_bindings( self, diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest_base.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest_base.py index 737034dadcec..c1480f36a745 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest_base.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/services/analytics_admin_service/transports/rest_base.py @@ -4866,6 +4866,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetUserProvidedDataSettings: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1alpha/{name=properties/*/userProvidedDataSettings}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = analytics_admin.GetUserProvidedDataSettingsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + 
_BaseAnalyticsAdminServiceRestTransport._BaseGetUserProvidedDataSettings._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListAccessBindings: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py index 5f5771b61545..a4b015635d59 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/__init__.py @@ -133,6 +133,7 @@ GetSKAdNetworkConversionValueSchemaRequest, GetSubpropertyEventFilterRequest, GetSubpropertySyncConfigRequest, + GetUserProvidedDataSettingsRequest, ListAccessBindingsRequest, ListAccessBindingsResponse, ListAccountsRequest, @@ -309,6 +310,7 @@ ServiceLevel, SKAdNetworkConversionValueSchema, SubpropertySyncConfig, + UserProvidedDataSettings, ) from .subproperty_event_filter import ( SubpropertyEventFilter, @@ -436,6 +438,7 @@ "GetSKAdNetworkConversionValueSchemaRequest", "GetSubpropertyEventFilterRequest", "GetSubpropertySyncConfigRequest", + "GetUserProvidedDataSettingsRequest", "ListAccessBindingsRequest", "ListAccessBindingsResponse", "ListAccountsRequest", @@ -591,6 +594,7 @@ "SearchAds360Link", "SKAdNetworkConversionValueSchema", "SubpropertySyncConfig", + "UserProvidedDataSettings", "ActionType", "ActorType", "ChangeHistoryResourceType", diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py index a56b944eae24..71d58050fe82 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py +++ 
b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/analytics_admin.py @@ -231,6 +231,7 @@ "ListSubpropertySyncConfigsResponse", "UpdateSubpropertySyncConfigRequest", "GetReportingIdentitySettingsRequest", + "GetUserProvidedDataSettingsRequest", }, ) @@ -483,18 +484,18 @@ class ListAccountsRequest(proto.Message): Attributes: page_size (int): - The maximum number of resources to return. - The service may return fewer than this value, - even if there are additional pages. If + Optional. The maximum number of resources to + return. The service may return fewer than this + value, even if there are additional pages. If unspecified, at most 50 resources will be returned. The maximum value is 200; (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous ``ListAccounts`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to - ``ListAccounts`` must match the call that provided the page - token. + Optional. A page token, received from a previous + ``ListAccounts`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListAccounts`` must match the call that + provided the page token. show_deleted (bool): Whether to include soft-deleted (ie: "trashed") Accounts in the results. Accounts can @@ -660,18 +661,18 @@ class ListPropertiesRequest(proto.Message): | firebase_project:project-id | The firebase project with id: project-id. | | firebase_project:123 | The firebase project with number: 123. | page_size (int): - The maximum number of resources to return. - The service may return fewer than this value, - even if there are additional pages. If + Optional. The maximum number of resources to + return. The service may return fewer than this + value, even if there are additional pages. If unspecified, at most 50 resources will be returned. 
The maximum value is 200; (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous ``ListProperties`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to - ``ListProperties`` must match the call that provided the - page token. + Optional. A page token, received from a previous + ``ListProperties`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListProperties`` must match the call that + provided the page token. show_deleted (bool): Whether to include soft-deleted (ie: "trashed") Properties in the results. Properties @@ -832,18 +833,18 @@ class ListFirebaseLinksRequest(proto.Message): Example: ``properties/1234`` page_size (int): - The maximum number of resources to return. - The service may return fewer than this value, - even if there are additional pages. If + Optional. The maximum number of resources to + return. The service may return fewer than this + value, even if there are additional pages. If unspecified, at most 50 resources will be returned. The maximum value is 200; (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous ``ListFirebaseLinks`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to - ``ListFirebaseLinks`` must match the call that provided the - page token. + Optional. A page token, received from a previous + ``ListFirebaseLinks`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListFirebaseLinks`` must match the call that + provided the page token. """ parent: str = proto.Field( @@ -975,12 +976,12 @@ class ListGoogleAdsLinksRequest(proto.Message): parent (str): Required. Example format: properties/1234 page_size (int): - The maximum number of resources to return. - If unspecified, at most 50 resources will be - returned. 
The maximum value is 200 (higher - values will be coerced to the maximum). + Optional. The maximum number of resources to + return. If unspecified, at most 50 resources + will be returned. The maximum value is 200 + (higher values will be coerced to the maximum). page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListGoogleAdsLinks`` call. Provide this to retrieve the subsequent page. @@ -1052,15 +1053,15 @@ class ListAccountSummariesRequest(proto.Message): Attributes: page_size (int): - The maximum number of AccountSummary - resources to return. The service may return - fewer than this value, even if there are - additional pages. If unspecified, at most 50 + Optional. The maximum number of + AccountSummary resources to return. The service + may return fewer than this value, even if there + are additional pages. If unspecified, at most 50 resources will be returned. The maximum value is 200; (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListAccountSummaries`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to ``ListAccountSummaries`` must match the call @@ -1357,12 +1358,12 @@ class ListMeasurementProtocolSecretsRequest(proto.Message): properties/{property}/dataStreams/{dataStream}/measurementProtocolSecrets page_size (int): - The maximum number of resources to return. - If unspecified, at most 10 resources will be - returned. The maximum value is 10. Higher values - will be coerced to the maximum. + Optional. The maximum number of resources to + return. If unspecified, at most 10 resources + will be returned. The maximum value is 10. + Higher values will be coerced to the maximum. page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListMeasurementProtocolSecrets`` call. 
Provide this to retrieve the subsequent page. When paginating, all other parameters provided to ``ListMeasurementProtocolSecrets`` @@ -1509,14 +1510,14 @@ class ListSKAdNetworkConversionValueSchemasRequest(proto.Message): Format: properties/{property_id}/dataStreams/{dataStream} Example: properties/1234/dataStreams/5678 page_size (int): - The maximum number of resources to return. - The service may return fewer than this value, - even if there are additional pages. If + Optional. The maximum number of resources to + return. The service may return fewer than this + value, even if there are additional pages. If unspecified, at most 50 resources will be returned. The maximum value is 200; (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListSKAdNetworkConversionValueSchemas`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to @@ -1704,12 +1705,12 @@ class ListConversionEventsRequest(proto.Message): Required. The resource name of the parent property. Example: 'properties/123' page_size (int): - The maximum number of resources to return. - If unspecified, at most 50 resources will be - returned. The maximum value is 200; (higher - values will be coerced to the maximum) + Optional. The maximum number of resources to + return. If unspecified, at most 50 resources + will be returned. The maximum value is 200; + (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListConversionEvents`` call. Provide this to retrieve the subsequent page. When paginating, all other parameters provided to ``ListConversionEvents`` must match the call @@ -1846,16 +1847,16 @@ class ListKeyEventsRequest(proto.Message): Required. The resource name of the parent property. 
Example: 'properties/123' page_size (int): - The maximum number of resources to return. - If unspecified, at most 50 resources will be - returned. The maximum value is 200; (higher - values will be coerced to the maximum) + Optional. The maximum number of resources to + return. If unspecified, at most 50 resources + will be returned. The maximum value is 200; + (higher values will be coerced to the maximum) page_token (str): - A page token, received from a previous ``ListKeyEvents`` - call. Provide this to retrieve the subsequent page. When - paginating, all other parameters provided to - ``ListKeyEvents`` must match the call that provided the page - token. + Optional. A page token, received from a previous + ``ListKeyEvents`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListKeyEvents`` must match the call that + provided the page token. """ parent: str = proto.Field( @@ -2421,12 +2422,12 @@ class ListCustomDimensionsRequest(proto.Message): parent (str): Required. Example format: properties/1234 page_size (int): - The maximum number of resources to return. - If unspecified, at most 50 resources will be - returned. The maximum value is 200 (higher - values will be coerced to the maximum). + Optional. The maximum number of resources to + return. If unspecified, at most 50 resources + will be returned. The maximum value is 200 + (higher values will be coerced to the maximum). page_token (str): - A page token, received from a previous + Optional. A page token, received from a previous ``ListCustomDimensions`` call. Provide this to retrieve the subsequent page. @@ -5234,4 +5235,20 @@ class GetReportingIdentitySettingsRequest(proto.Message): ) +class GetUserProvidedDataSettingsRequest(proto.Message): + r"""Request message for GetUserProvidedDataSettings RPC + + Attributes: + name (str): + Required. The name of the user provided data + settings to retrieve. 
Format: + properties/{property}/userProvidedDataSettings + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py index 9d50d4498249..52f270c530a3 100644 --- a/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py +++ b/packages/google-analytics-admin/google/analytics/admin_v1alpha/types/resources.py @@ -80,6 +80,7 @@ "ReportingDataAnnotation", "SubpropertySyncConfig", "ReportingIdentitySettings", + "UserProvidedDataSettings", }, ) @@ -300,6 +301,8 @@ class ChangeHistoryResourceType(proto.Enum): SubpropertySyncConfig resource REPORTING_IDENTITY_SETTINGS (34): ReportingIdentitySettings resource + USER_PROVIDED_DATA_SETTINGS (35): + UserProvidedDataSettings resource """ CHANGE_HISTORY_RESOURCE_TYPE_UNSPECIFIED = 0 @@ -332,6 +335,7 @@ class ChangeHistoryResourceType(proto.Enum): REPORTING_DATA_ANNOTATION = 32 SUBPROPERTY_SYNC_CONFIG = 33 REPORTING_IDENTITY_SETTINGS = 34 + USER_PROVIDED_DATA_SETTINGS = 35 class GoogleSignalsState(proto.Enum): @@ -484,7 +488,7 @@ class Account(proto.Message): Attributes: name (str): - Output only. Resource name of this account. + Identifier. Resource name of this account. Format: accounts/{account} Example: "accounts/100". create_time (google.protobuf.timestamp_pb2.Timestamp): @@ -548,7 +552,7 @@ class Property(proto.Message): Attributes: name (str): - Output only. Resource name of this property. Format: + Identifier. Resource name of this property. Format: properties/{property_id} Example: "properties/1000". property_type (google.analytics.admin_v1alpha.types.PropertyType): Immutable. The property type for this Property resource. @@ -697,7 +701,7 @@ class DataStream(proto.Message): This field is a member of `oneof`_ ``stream_data``. name (str): - Output only. 
Resource name of this Data Stream. Format: + Identifier. Resource name of this Data Stream. Format: properties/{property_id}/dataStreams/{stream_id} Example: "properties/1000/dataStreams/2000". type_ (google.analytics.admin_v1alpha.types.DataStream.DataStreamType): @@ -862,7 +866,7 @@ class FirebaseLink(proto.Message): Attributes: name (str): - Output only. Example format: + Identifier. Example format: properties/1234/firebaseLinks/5678 project (str): Immutable. Firebase project resource name. When creating a @@ -899,7 +903,7 @@ class GlobalSiteTag(proto.Message): Attributes: name (str): - Output only. Resource name for this GlobalSiteTag resource. + Identifier. Resource name for this GlobalSiteTag resource. Format: properties/{property_id}/dataStreams/{stream_id}/globalSiteTag Example: "properties/123/dataStreams/456/globalSiteTag". @@ -925,7 +929,7 @@ class GoogleAdsLink(proto.Message): Attributes: name (str): - Output only. Format: + Identifier. Format: properties/{propertyId}/googleAdsLinks/{googleAdsLinkId} @@ -996,7 +1000,7 @@ class DataSharingSettings(proto.Message): Attributes: name (str): - Output only. Resource name. + Identifier. Resource name. Format: accounts/{account}/dataSharingSettings Example: "accounts/1000/dataSharingSettings". sharing_with_google_support_enabled (bool): @@ -1077,7 +1081,7 @@ class AccountSummary(proto.Message): Attributes: name (str): - Resource name for this account summary. Format: + Identifier. Resource name for this account summary. Format: accountSummaries/{account_id} Example: "accountSummaries/1000". account (str): @@ -1157,7 +1161,7 @@ class MeasurementProtocolSecret(proto.Message): Attributes: name (str): - Output only. Resource name of this secret. + Identifier. Resource name of this secret. This secret may be a child of any type of stream. Format: @@ -1191,9 +1195,9 @@ class SKAdNetworkConversionValueSchema(proto.Message): Attributes: name (str): - Output only. Resource name of the schema. 
- This will be child of ONLY an iOS stream, and - there can be at most one such child under an iOS + Identifier. Resource name of the schema. This + will be child of ONLY an iOS stream, and there + can be at most one such child under an iOS stream. Format: properties/{property}/dataStreams/{dataStream}/sKAdNetworkConversionValueSchema @@ -1664,6 +1668,11 @@ class ChangeHistoryResource(proto.Message): A snapshot of a ReportingIdentitySettings resource in change history. + This field is a member of `oneof`_ ``resource``. + user_provided_data_settings (google.analytics.admin_v1alpha.types.UserProvidedDataSettings): + A snapshot of a UserProvidedDataSettings + resource in change history. + This field is a member of `oneof`_ ``resource``. """ @@ -1845,6 +1854,12 @@ class ChangeHistoryResource(proto.Message): oneof="resource", message="ReportingIdentitySettings", ) + user_provided_data_settings: "UserProvidedDataSettings" = proto.Field( + proto.MESSAGE, + number=35, + oneof="resource", + message="UserProvidedDataSettings", + ) resource: str = proto.Field( proto.STRING, @@ -1873,7 +1888,7 @@ class DisplayVideo360AdvertiserLink(proto.Message): Attributes: name (str): - Output only. The resource name for this + Identifier. The resource name for this DisplayVideo360AdvertiserLink resource. Format: properties/{propertyId}/displayVideo360AdvertiserLinks/{linkId} @@ -1945,7 +1960,7 @@ class DisplayVideo360AdvertiserLinkProposal(proto.Message): Attributes: name (str): - Output only. The resource name for this + Identifier. The resource name for this DisplayVideo360AdvertiserLinkProposal resource. Format: @@ -2032,7 +2047,7 @@ class SearchAds360Link(proto.Message): Attributes: name (str): - Output only. The resource name for this + Identifier. The resource name for this SearchAds360Link resource. Format: properties/{propertyId}/searchAds360Links/{linkId} @@ -2140,7 +2155,7 @@ class ConversionEvent(proto.Message): Attributes: name (str): - Output only. 
Resource name of this conversion event. Format: + Identifier. Resource name of this conversion event. Format: properties/{property}/conversionEvents/{conversion_event} event_name (str): Immutable. The event name for this conversion @@ -2410,7 +2425,7 @@ class CustomDimension(proto.Message): Attributes: name (str): - Output only. Resource name for this + Identifier. Resource name for this CustomDimension resource. Format: properties/{property}/customDimensions/{customDimension} parameter_name (str): @@ -2502,7 +2517,7 @@ class CustomMetric(proto.Message): Attributes: name (str): - Output only. Resource name for this + Identifier. Resource name for this CustomMetric resource. Format: properties/{property}/customMetrics/{customMetric} parameter_name (str): @@ -2649,8 +2664,7 @@ class CalculatedMetric(proto.Message): Attributes: name (str): - Output only. Resource name for this CalculatedMetric. - Format: + Identifier. Resource name for this CalculatedMetric. Format: 'properties/{property_id}/calculatedMetrics/{calculated_metric_id}' description (str): Optional. Description for this calculated @@ -2790,7 +2804,7 @@ class DataRetentionSettings(proto.Message): Attributes: name (str): - Output only. Resource name for this + Identifier. Resource name for this DataRetentionSetting resource. Format: properties/{property}/dataRetentionSettings event_data_retention (google.analytics.admin_v1alpha.types.DataRetentionSettings.RetentionDuration): @@ -3609,4 +3623,39 @@ class ReportingIdentity(proto.Enum): ) +class UserProvidedDataSettings(proto.Message): + r"""Configuration for user-provided data collection. This is a + singleton resource for a Google Analytics property. + + Attributes: + name (str): + Identifier. Resource name of this setting. + Format: + properties/{property}/userProvidedDataSettings + Example: + "properties/1000/userProvidedDataSettings". + user_provided_data_collection_enabled (bool): + Optional. Whether this property accepts + user-provided data sent to it. 
+ automatically_detected_data_collection_enabled (bool): + Optional. Whether this property allows a Google Tag to + automatically collect user-provided data from your website. + This setting only takes effect if + ``user_provided_data_collection_enabled`` is also true. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + user_provided_data_collection_enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + automatically_detected_data_collection_enabled: bool = proto.Field( + proto.BOOL, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py index 633bcc8c1c3b..8fe63f02a96a 100644 --- a/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py +++ b/packages/google-analytics-admin/tests/unit/gapic/admin_v1alpha/test_analytics_admin_service.py @@ -59548,6 +59548,357 @@ async def test_get_reporting_identity_settings_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetUserProvidedDataSettingsRequest, + dict, + ], +) +def test_get_user_provided_data_settings(request_type, transport: str = "grpc"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.UserProvidedDataSettings( + name="name_value", + user_provided_data_collection_enabled=True, + automatically_detected_data_collection_enabled=True, + ) + response = client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetUserProvidedDataSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.UserProvidedDataSettings) + assert response.name == "name_value" + assert response.user_provided_data_collection_enabled is True + assert response.automatically_detected_data_collection_enabled is True + + +def test_get_user_provided_data_settings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = analytics_admin.GetUserProvidedDataSettingsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_user_provided_data_settings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == analytics_admin.GetUserProvidedDataSettingsRequest( + name="name_value", + ) + + +def test_get_user_provided_data_settings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_user_provided_data_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_user_provided_data_settings + ] = mock_rpc + request = {} + client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_user_provided_data_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_user_provided_data_settings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_user_provided_data_settings + ] = mock_rpc + + request = {} + await client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.get_user_provided_data_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_async( + transport: str = "grpc_asyncio", + request_type=analytics_admin.GetUserProvidedDataSettingsRequest, +): + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.UserProvidedDataSettings( + name="name_value", + user_provided_data_collection_enabled=True, + automatically_detected_data_collection_enabled=True, + ) + ) + response = await client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = analytics_admin.GetUserProvidedDataSettingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.UserProvidedDataSettings) + assert response.name == "name_value" + assert response.user_provided_data_collection_enabled is True + assert response.automatically_detected_data_collection_enabled is True + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_async_from_dict(): + await test_get_user_provided_data_settings_async(request_type=dict) + + +def test_get_user_provided_data_settings_field_headers(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetUserProvidedDataSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + call.return_value = resources.UserProvidedDataSettings() + client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_field_headers_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = analytics_admin.GetUserProvidedDataSettingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.UserProvidedDataSettings() + ) + await client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_user_provided_data_settings_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.UserProvidedDataSettings() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_user_provided_data_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_user_provided_data_settings_flattened_error(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_user_provided_data_settings( + analytics_admin.GetUserProvidedDataSettingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_flattened_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = resources.UserProvidedDataSettings() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.UserProvidedDataSettings() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_user_provided_data_settings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_flattened_error_async(): + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_user_provided_data_settings( + analytics_admin.GetUserProvidedDataSettingsRequest(), + name="name_value", + ) + + def test_get_account_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -89332,6 +89683,191 @@ def test_get_reporting_identity_settings_rest_flattened_error(transport: str = " ) +def test_get_user_provided_data_settings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_user_provided_data_settings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_user_provided_data_settings + ] = mock_rpc + + request = {} + client.get_user_provided_data_settings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_user_provided_data_settings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_user_provided_data_settings_rest_required_fields( + request_type=analytics_admin.GetUserProvidedDataSettingsRequest, +): + transport_class = transports.AnalyticsAdminServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_user_provided_data_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_user_provided_data_settings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = resources.UserProvidedDataSettings() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.UserProvidedDataSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_user_provided_data_settings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_user_provided_data_settings_rest_unset_required_fields(): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_user_provided_data_settings._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_user_provided_data_settings_rest_flattened(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = resources.UserProvidedDataSettings() + + # get arguments that satisfy an http rule for this method + sample_request = {"name": "properties/sample1/userProvidedDataSettings"} + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = resources.UserProvidedDataSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_user_provided_data_settings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1alpha/{name=properties/*/userProvidedDataSettings}" + % client.transport._host, + args[1], + ) + + +def test_get_user_provided_data_settings_rest_flattened_error(transport: str = "rest"): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_user_provided_data_settings( + analytics_admin.GetUserProvidedDataSettingsRequest(), + name="name_value", + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.AnalyticsAdminServiceGrpcTransport( @@ -92961,6 +93497,29 @@ def test_get_reporting_identity_settings_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_get_user_provided_data_settings_empty_call_grpc(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + call.return_value = resources.UserProvidedDataSettings() + client.get_user_provided_data_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_admin.GetUserProvidedDataSettingsRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = AnalyticsAdminServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -97617,6 +98176,37 @@ async def test_get_reporting_identity_settings_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_user_provided_data_settings_empty_call_grpc_asyncio(): + client = AnalyticsAdminServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + resources.UserProvidedDataSettings( + name="name_value", + user_provided_data_collection_enabled=True, + automatically_detected_data_collection_enabled=True, + ) + ) + await client.get_user_provided_data_settings(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_admin.GetUserProvidedDataSettingsRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = AnalyticsAdminServiceClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -123192,6 +123782,145 @@ def test_get_reporting_identity_settings_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_get_user_provided_data_settings_rest_bad_request( + request_type=analytics_admin.GetUserProvidedDataSettingsRequest, +): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/userProvidedDataSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_user_provided_data_settings(request) + + +@pytest.mark.parametrize( + "request_type", + [ + analytics_admin.GetUserProvidedDataSettingsRequest, + dict, + ], +) +def test_get_user_provided_data_settings_rest_call_success(request_type): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "properties/sample1/userProvidedDataSettings"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = resources.UserProvidedDataSettings( + name="name_value", + user_provided_data_collection_enabled=True, + automatically_detected_data_collection_enabled=True, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = resources.UserProvidedDataSettings.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_user_provided_data_settings(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.UserProvidedDataSettings) + assert response.name == "name_value" + assert response.user_provided_data_collection_enabled is True + assert response.automatically_detected_data_collection_enabled is True + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_user_provided_data_settings_rest_interceptors(null_interceptor): + transport = transports.AnalyticsAdminServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AnalyticsAdminServiceRestInterceptor(), + ) + client = AnalyticsAdminServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_user_provided_data_settings", + ) as post, + mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "post_get_user_provided_data_settings_with_metadata", + ) as post_with_metadata, + 
mock.patch.object( + transports.AnalyticsAdminServiceRestInterceptor, + "pre_get_user_provided_data_settings", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = analytics_admin.GetUserProvidedDataSettingsRequest.pb( + analytics_admin.GetUserProvidedDataSettingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = resources.UserProvidedDataSettings.to_json( + resources.UserProvidedDataSettings() + ) + req.return_value.content = return_value + + request = analytics_admin.GetUserProvidedDataSettingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = resources.UserProvidedDataSettings() + post_with_metadata.return_value = resources.UserProvidedDataSettings(), metadata + + client.get_user_provided_data_settings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_initialize_client_w_rest(): client = AnalyticsAdminServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" @@ -126562,6 +127291,28 @@ def test_get_reporting_identity_settings_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_user_provided_data_settings_empty_call_rest(): + client = AnalyticsAdminServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_user_provided_data_settings), "__call__" + ) as call: + client.get_user_provided_data_settings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = analytics_admin.GetUserProvidedDataSettingsRequest() + + assert args[0] == request_msg + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. client = AnalyticsAdminServiceClient( @@ -126749,6 +127500,7 @@ def test_analytics_admin_service_base_transport(): "update_subproperty_sync_config", "get_subproperty_sync_config", "get_reporting_identity_settings", + "get_user_provided_data_settings", ) for method in methods: with pytest.raises(NotImplementedError): @@ -127520,6 +128272,9 @@ def test_analytics_admin_service_client_transport_session_collision(transport_na session1 = client1.transport.get_reporting_identity_settings._session session2 = client2.transport.get_reporting_identity_settings._session assert session1 != session2 + session1 = client1.transport.get_user_provided_data_settings._session + session2 = client2.transport.get_user_provided_data_settings._session + assert session1 != session2 def test_analytics_admin_service_grpc_transport_channel(): @@ -128535,8 +129290,28 @@ def test_parse_subproperty_sync_config_path(): assert expected == actual +def test_user_provided_data_settings_path(): + property = "cuttlefish" + expected = "properties/{property}/userProvidedDataSettings".format( + property=property, + ) + actual = AnalyticsAdminServiceClient.user_provided_data_settings_path(property) + assert expected == actual + + +def test_parse_user_provided_data_settings_path(): + expected = { + "property": "mussel", + } + path = AnalyticsAdminServiceClient.user_provided_data_settings_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AnalyticsAdminServiceClient.parse_user_provided_data_settings_path(path) + assert expected == actual + + def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -128546,7 +129321,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = AnalyticsAdminServiceClient.common_billing_account_path(**expected) @@ -128556,7 +129331,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -128566,7 +129341,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = AnalyticsAdminServiceClient.common_folder_path(**expected) @@ -128576,7 +129351,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -128586,7 +129361,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } path = AnalyticsAdminServiceClient.common_organization_path(**expected) @@ -128596,7 +129371,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -128606,7 +129381,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = AnalyticsAdminServiceClient.common_project_path(**expected) @@ -128616,8 +129391,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - 
project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -128628,8 +129403,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = AnalyticsAdminServiceClient.common_location_path(**expected) diff --git a/packages/google-apps-chat/google/apps/chat/__init__.py b/packages/google-apps-chat/google/apps/chat/__init__.py index 4f472be4866c..33c0ba90a14b 100644 --- a/packages/google-apps-chat/google/apps/chat/__init__.py +++ b/packages/google-apps-chat/google/apps/chat/__init__.py @@ -133,12 +133,15 @@ CreateSpaceRequest, DeleteSpaceRequest, FindDirectMessageRequest, + FindGroupChatsRequest, + FindGroupChatsResponse, GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, SearchSpacesRequest, SearchSpacesResponse, Space, + SpaceView, UpdateSpaceRequest, ) from google.apps.chat_v1.types.space_event import ( @@ -264,6 +267,8 @@ "CreateSpaceRequest", "DeleteSpaceRequest", "FindDirectMessageRequest", + "FindGroupChatsRequest", + "FindGroupChatsResponse", "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", @@ -271,6 +276,7 @@ "SearchSpacesResponse", "Space", "UpdateSpaceRequest", + "SpaceView", "GetSpaceEventRequest", "ListSpaceEventsRequest", "ListSpaceEventsResponse", diff --git a/packages/google-apps-chat/google/apps/chat_v1/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/__init__.py index 8faf4e22479c..31d2b7f7cec9 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/__init__.py +++ b/packages/google-apps-chat/google/apps/chat_v1/__init__.py @@ -141,12 +141,15 @@ CreateSpaceRequest, DeleteSpaceRequest, FindDirectMessageRequest, + FindGroupChatsRequest, + FindGroupChatsResponse, GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, SearchSpacesRequest, SearchSpacesResponse, 
Space, + SpaceView, UpdateSpaceRequest, ) from .types.space_event import ( @@ -303,6 +306,8 @@ def _get_version(dependency_name): "Emoji", "EmojiReactionSummary", "FindDirectMessageRequest", + "FindGroupChatsRequest", + "FindGroupChatsResponse", "ForwardedMetadata", "GetAttachmentRequest", "GetCustomEmojiRequest", @@ -372,6 +377,7 @@ def _get_version(dependency_name): "SpaceNotificationSetting", "SpaceReadState", "SpaceUpdatedEventData", + "SpaceView", "Thread", "ThreadReadState", "UpdateMembershipRequest", diff --git a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json index 7cdaf7f91975..974fce08eba2 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json +++ b/packages/google-apps-chat/google/apps/chat_v1/gapic_metadata.json @@ -80,6 +80,11 @@ "find_direct_message" ] }, + "FindGroupChats": { + "methods": [ + "find_group_chats" + ] + }, "GetAttachment": { "methods": [ "get_attachment" @@ -295,6 +300,11 @@ "find_direct_message" ] }, + "FindGroupChats": { + "methods": [ + "find_group_chats" + ] + }, "GetAttachment": { "methods": [ "get_attachment" @@ -510,6 +520,11 @@ "find_direct_message" ] }, + "FindGroupChats": { + "methods": [ + "find_group_chats" + ] + }, "GetAttachment": { "methods": [ "get_attachment" diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py index 646b04bb490f..edce8231df1f 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/async_client.py @@ -2956,6 +2956,124 @@ async def sample_find_direct_message(): # Done; return the response. 
return response + async def find_group_chats( + self, + request: Optional[Union[space.FindGroupChatsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FindGroupChatsAsyncPager: + r"""Returns all spaces with ``spaceType == GROUP_CHAT``, whose human + memberships contain exactly the calling user, and the users + specified in ``FindGroupChatsRequest.users``. Only members that + have joined the conversation are supported. For an example, see + `Find group + chats `__. + + If the calling user blocks, or is blocked by, some users, and no + spaces with the entire specified set of users are found, this + method returns spaces that don't include the blocked or blocking + users. + + The specified set of users must contain only human (non-app) + memberships. A request that contains non-human users doesn't + return any spaces. + + Requires `user + authentication `__ + with one of the following `authorization + scopes `__: + + - ``https://www.googleapis.com/auth/chat.memberships.readonly`` + - ``https://www.googleapis.com/auth/chat.memberships`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + async def sample_find_group_chats(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.FindGroupChatsRequest( + ) + + # Make the request + page_result = client.find_group_chats(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.apps.chat_v1.types.FindGroupChatsRequest, dict]]): + The request object. A request to get group chat spaces + based on user resources. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.FindGroupChatsAsyncPager: + A response containing group chat + spaces with exactly the calling user and + the requested users. + + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.FindGroupChatsRequest): + request = space.FindGroupChatsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.find_group_chats + ] + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.FindGroupChatsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def create_membership( self, request: Optional[Union[gc_membership.CreateMembershipRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py index 147fe44be01c..e5d6c4bb52a1 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/client.py @@ -3559,6 +3559,122 @@ def sample_find_direct_message(): # Done; return the response. return response + def find_group_chats( + self, + request: Optional[Union[space.FindGroupChatsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.FindGroupChatsPager: + r"""Returns all spaces with ``spaceType == GROUP_CHAT``, whose human + memberships contain exactly the calling user, and the users + specified in ``FindGroupChatsRequest.users``. Only members that + have joined the conversation are supported. For an example, see + `Find group + chats `__. + + If the calling user blocks, or is blocked by, some users, and no + spaces with the entire specified set of users are found, this + method returns spaces that don't include the blocked or blocking + users. + + The specified set of users must contain only human (non-app) + memberships. 
A request that contains non-human users doesn't + return any spaces. + + Requires `user + authentication `__ + with one of the following `authorization + scopes `__: + + - ``https://www.googleapis.com/auth/chat.memberships.readonly`` + - ``https://www.googleapis.com/auth/chat.memberships`` + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.apps import chat_v1 + + def sample_find_group_chats(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.FindGroupChatsRequest( + ) + + # Make the request + page_result = client.find_group_chats(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.apps.chat_v1.types.FindGroupChatsRequest, dict]): + The request object. A request to get group chat spaces + based on user resources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.apps.chat_v1.services.chat_service.pagers.FindGroupChatsPager: + A response containing group chat + spaces with exactly the calling user and + the requested users. + + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, space.FindGroupChatsRequest): + request = space.FindGroupChatsRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.find_group_chats] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.FindGroupChatsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def create_membership( self, request: Optional[Union[gc_membership.CreateMembershipRequest, dict]] = None, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py index ff5b2d48045d..84551e70b0c2 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/pagers.py @@ -672,6 +672,162 @@ def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) +class FindGroupChatsPager: + """A pager for iterating through ``find_group_chats`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.FindGroupChatsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``spaces`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``FindGroupChats`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.FindGroupChatsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., space.FindGroupChatsResponse], + request: space.FindGroupChatsRequest, + response: space.FindGroupChatsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.FindGroupChatsRequest): + The initial request object. + response (google.apps.chat_v1.types.FindGroupChatsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = space.FindGroupChatsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[space.FindGroupChatsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[space.Space]: + for page in self.pages: + yield from page.spaces + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class FindGroupChatsAsyncPager: + """A pager for iterating through ``find_group_chats`` requests. + + This class thinly wraps an initial + :class:`google.apps.chat_v1.types.FindGroupChatsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``spaces`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``FindGroupChats`` requests and continue to iterate + through the ``spaces`` field on the + corresponding responses. + + All the usual :class:`google.apps.chat_v1.types.FindGroupChatsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[space.FindGroupChatsResponse]], + request: space.FindGroupChatsRequest, + response: space.FindGroupChatsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.apps.chat_v1.types.FindGroupChatsRequest): + The initial request object. + response (google.apps.chat_v1.types.FindGroupChatsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = space.FindGroupChatsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[space.FindGroupChatsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[space.Space]: + async def async_generator(): + async for page in self.pages: + for response in page.spaces: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + class ListReactionsPager: """A pager for iterating through ``list_reactions`` requests. 
diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py index 1726e664dcaa..7065bb9326b3 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/base.py @@ -451,6 +451,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.find_group_chats: gapic_v1.method.wrap_method( + self.find_group_chats, + default_retry=retries.Retry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.create_membership: gapic_v1.method.wrap_method( self.create_membership, default_retry=retries.Retry( @@ -960,6 +974,15 @@ def find_direct_message( ]: raise NotImplementedError() + @property + def find_group_chats( + self, + ) -> Callable[ + [space.FindGroupChatsRequest], + Union[space.FindGroupChatsResponse, Awaitable[space.FindGroupChatsResponse]], + ]: + raise NotImplementedError() + @property def create_membership( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py index a5ab6a47b320..a91ad9f105f4 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc.py @@ -1352,6 +1352,54 @@ def find_direct_message( ) return self._stubs["find_direct_message"] + @property + def find_group_chats( + self, + ) -> Callable[[space.FindGroupChatsRequest], space.FindGroupChatsResponse]: + r"""Return a callable for the find group chats method over gRPC. 
+ + Returns all spaces with ``spaceType == GROUP_CHAT``, whose human + memberships contain exactly the calling user, and the users + specified in ``FindGroupChatsRequest.users``. Only members that + have joined the conversation are supported. For an example, see + `Find group + chats `__. + + If the calling user blocks, or is blocked by, some users, and no + spaces with the entire specified set of users are found, this + method returns spaces that don't include the blocked or blocking + users. + + The specified set of users must contain only human (non-app) + memberships. A request that contains non-human users doesn't + return any spaces. + + Requires `user + authentication `__ + with one of the following `authorization + scopes `__: + + - ``https://www.googleapis.com/auth/chat.memberships.readonly`` + - ``https://www.googleapis.com/auth/chat.memberships`` + + Returns: + Callable[[~.FindGroupChatsRequest], + ~.FindGroupChatsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "find_group_chats" not in self._stubs: + self._stubs["find_group_chats"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/FindGroupChats", + request_serializer=space.FindGroupChatsRequest.serialize, + response_deserializer=space.FindGroupChatsResponse.deserialize, + ) + return self._stubs["find_group_chats"] + @property def create_membership( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py index 50fac72a710f..afb695773948 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/grpc_asyncio.py @@ -1374,6 +1374,56 @@ def find_direct_message( ) return self._stubs["find_direct_message"] + @property + def find_group_chats( + self, + ) -> Callable[ + [space.FindGroupChatsRequest], Awaitable[space.FindGroupChatsResponse] + ]: + r"""Return a callable for the find group chats method over gRPC. + + Returns all spaces with ``spaceType == GROUP_CHAT``, whose human + memberships contain exactly the calling user, and the users + specified in ``FindGroupChatsRequest.users``. Only members that + have joined the conversation are supported. For an example, see + `Find group + chats `__. + + If the calling user blocks, or is blocked by, some users, and no + spaces with the entire specified set of users are found, this + method returns spaces that don't include the blocked or blocking + users. + + The specified set of users must contain only human (non-app) + memberships. A request that contains non-human users doesn't + return any spaces. 
+ + Requires `user + authentication `__ + with one of the following `authorization + scopes `__: + + - ``https://www.googleapis.com/auth/chat.memberships.readonly`` + - ``https://www.googleapis.com/auth/chat.memberships`` + + Returns: + Callable[[~.FindGroupChatsRequest], + Awaitable[~.FindGroupChatsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "find_group_chats" not in self._stubs: + self._stubs["find_group_chats"] = self._logged_channel.unary_unary( + "/google.chat.v1.ChatService/FindGroupChats", + request_serializer=space.FindGroupChatsRequest.serialize, + response_deserializer=space.FindGroupChatsResponse.deserialize, + ) + return self._stubs["find_group_chats"] + @property def create_membership( self, @@ -2731,6 +2781,20 @@ def _prep_wrapped_messages(self, client_info): default_timeout=30.0, client_info=client_info, ), + self.find_group_chats: self._wrap_method( + self.find_group_chats, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=30.0, + ), + default_timeout=30.0, + client_info=client_info, + ), self.create_membership: self._wrap_method( self.create_membership, default_retry=retries.AsyncRetry( diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py index a8cd7e10e435..353b46a180dc 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest.py @@ -186,6 +186,14 @@ def post_find_direct_message(self, response): 
logging.log(f"Received response: {response}") return response + def pre_find_group_chats(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_find_group_chats(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_attachment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -898,6 +906,52 @@ def post_find_direct_message_with_metadata( """ return response, metadata + def pre_find_group_chats( + self, + request: space.FindGroupChatsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[space.FindGroupChatsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for find_group_chats + + Override in a subclass to manipulate the request or metadata + before they are sent to the ChatService server. + """ + return request, metadata + + def post_find_group_chats( + self, response: space.FindGroupChatsResponse + ) -> space.FindGroupChatsResponse: + """Post-rpc interceptor for find_group_chats + + DEPRECATED. Please use the `post_find_group_chats_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the ChatService server but before + it is returned to user code. This `post_find_group_chats` interceptor runs + before the `post_find_group_chats_with_metadata` interceptor. + """ + return response + + def post_find_group_chats_with_metadata( + self, + response: space.FindGroupChatsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[space.FindGroupChatsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for find_group_chats + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the ChatService server but before it is returned to user code. 
+ + We recommend only using this `post_find_group_chats_with_metadata` + interceptor in new development instead of the `post_find_group_chats` interceptor. + When both interceptors are used, this `post_find_group_chats_with_metadata` interceptor runs after the + `post_find_group_chats` interceptor. The (possibly modified) response returned by + `post_find_group_chats` will be passed to + `post_find_group_chats_with_metadata`. + """ + return response, metadata + def pre_get_attachment( self, request: attachment.GetAttachmentRequest, @@ -4277,6 +4331,156 @@ def __call__( ) return resp + class _FindGroupChats( + _BaseChatServiceRestTransport._BaseFindGroupChats, ChatServiceRestStub + ): + def __hash__(self): + return hash("ChatServiceRestTransport.FindGroupChats") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: space.FindGroupChatsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> space.FindGroupChatsResponse: + r"""Call the find group chats method over HTTP. + + Args: + request (~.space.FindGroupChatsRequest): + The request object. A request to get group chat spaces + based on user resources. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.space.FindGroupChatsResponse: + A response containing group chat + spaces with exactly the calling user and + the requested users. + + """ + + http_options = ( + _BaseChatServiceRestTransport._BaseFindGroupChats._get_http_options() + ) + + request, metadata = self._interceptor.pre_find_group_chats( + request, metadata + ) + transcoded_request = _BaseChatServiceRestTransport._BaseFindGroupChats._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseChatServiceRestTransport._BaseFindGroupChats._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.chat_v1.ChatServiceClient.FindGroupChats", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "FindGroupChats", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ChatServiceRestTransport._FindGroupChats._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = space.FindGroupChatsResponse() + pb_resp = space.FindGroupChatsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_find_group_chats(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_find_group_chats_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = space.FindGroupChatsResponse.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.chat_v1.ChatServiceClient.find_group_chats", + extra={ + "serviceName": "google.chat.v1.ChatService", + "rpcName": "FindGroupChats", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GetAttachment( _BaseChatServiceRestTransport._BaseGetAttachment, ChatServiceRestStub ): @@ -8683,6 +8887,14 @@ def find_direct_message( # In C++ this would require a dynamic_cast return self._FindDirectMessage(self._session, self._host, self._interceptor) # type: ignore + @property + def find_group_chats( + self, + ) -> Callable[[space.FindGroupChatsRequest], space.FindGroupChatsResponse]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._FindGroupChats(self._session, self._host, self._interceptor) # type: ignore + @property def get_attachment( self, diff --git a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py index 683a9f1fe395..a74f035608aa 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py +++ b/packages/google-apps-chat/google/apps/chat_v1/services/chat_service/transports/rest_base.py @@ -839,6 +839,38 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseFindGroupChats: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/spaces:findGroupChats", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = space.FindGroupChatsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetAttachment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py index c68178f3e922..de5c8d543da0 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py +++ 
b/packages/google-apps-chat/google/apps/chat_v1/types/__init__.py @@ -138,12 +138,15 @@ CreateSpaceRequest, DeleteSpaceRequest, FindDirectMessageRequest, + FindGroupChatsRequest, + FindGroupChatsResponse, GetSpaceRequest, ListSpacesRequest, ListSpacesResponse, SearchSpacesRequest, SearchSpacesResponse, Space, + SpaceView, UpdateSpaceRequest, ) from .space_event import ( @@ -273,6 +276,8 @@ "CreateSpaceRequest", "DeleteSpaceRequest", "FindDirectMessageRequest", + "FindGroupChatsRequest", + "FindGroupChatsResponse", "GetSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", @@ -280,6 +285,7 @@ "SearchSpacesResponse", "Space", "UpdateSpaceRequest", + "SpaceView", "GetSpaceEventRequest", "ListSpaceEventsRequest", "ListSpaceEventsResponse", diff --git a/packages/google-apps-chat/google/apps/chat_v1/types/space.py b/packages/google-apps-chat/google/apps/chat_v1/types/space.py index 97b540196a84..25197ee6930a 100644 --- a/packages/google-apps-chat/google/apps/chat_v1/types/space.py +++ b/packages/google-apps-chat/google/apps/chat_v1/types/space.py @@ -26,12 +26,15 @@ __protobuf__ = proto.module( package="google.chat.v1", manifest={ + "SpaceView", "Space", "CreateSpaceRequest", "ListSpacesRequest", "ListSpacesResponse", "GetSpaceRequest", "FindDirectMessageRequest", + "FindGroupChatsRequest", + "FindGroupChatsResponse", "UpdateSpaceRequest", "SearchSpacesRequest", "SearchSpacesResponse", @@ -42,6 +45,31 @@ ) +class SpaceView(proto.Enum): + r"""A view that specifies which fields should be populated on the + ```Space`` `__ + resource. To ensure compatibility with future releases, we recommend + that your code account for additional values. + + Values: + SPACE_VIEW_UNSPECIFIED (0): + The default / unset value. + SPACE_VIEW_RESOURCE_NAME_ONLY (3): + Populates only the Space resource name. + SPACE_VIEW_EXPANDED (4): + Populates Space resource fields. Note: the + ``permissionSettings`` field will not be populated. 
Requests + that specify SPACE_VIEW_EXPANDED must include scopes that + allow reading space data, for example, + https://www.googleapis.com/auth/chat.spaces or + https://www.googleapis.com/auth/chat.spaces.readonly. + """ + + SPACE_VIEW_UNSPECIFIED = 0 + SPACE_VIEW_RESOURCE_NAME_ONLY = 3 + SPACE_VIEW_EXPANDED = 4 + + class Space(proto.Message): r"""A space in Google Chat. Spaces are conversations between two or more users or 1:1 messages between a user and a Chat app. @@ -867,6 +895,106 @@ class FindDirectMessageRequest(proto.Message): ) +class FindGroupChatsRequest(proto.Message): + r"""A request to get group chat spaces based on user resources. + + Attributes: + users (MutableSequence[str]): + Optional. Resource names of all human users in group chat + with the calling user. Chat apps can't be included in the + request. + + The maximum number of users that can be specified in a + single request is ``49``. + + Format: ``users/{user}``, where ``{user}`` is either the + ``id`` for the + `person `__ + from the People API, or the ``id`` for the + `user `__ + in the Directory API. For example, to find all group chats + with the calling user and two other users, with People API + profile IDs ``123456789`` and ``987654321``, you can use + ``users/123456789`` and ``users/987654321``. You can also + use the email as an alias for ``{user}``. For example, + ``users/example@gmail.com`` where ``example@gmail.com`` is + the email of the Google Chat user. + page_size (int): + Optional. The maximum number of spaces to return. The + service might return fewer than this value. + + If unspecified, at most 10 spaces are returned. + + The maximum value is 30. If you use a value more than 30, + it's automatically changed to 30. + + Negative values return an ``INVALID_ARGUMENT`` error. + page_token (str): + Optional. A page token, received from a + previous call to find group chats. Provide this + parameter to retrieve the subsequent page. 
+ + When paginating, all other parameters provided + should match the call that provided the token. + Passing different values may lead to unexpected + results. + space_view (google.apps.chat_v1.types.SpaceView): + Requested space view type. If unset, defaults to + ``SPACE_VIEW_RESOURCE_NAME_ONLY``. Requests that specify + ``SPACE_VIEW_EXPANDED`` must include scopes that allow + reading space data, for example, + https://www.googleapis.com/auth/chat.spaces or + https://www.googleapis.com/auth/chat.spaces.readonly. + """ + + users: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=5, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + space_view: "SpaceView" = proto.Field( + proto.ENUM, + number=4, + enum="SpaceView", + ) + + +class FindGroupChatsResponse(proto.Message): + r"""A response containing group chat spaces with exactly the + calling user and the requested users. + + Attributes: + spaces (MutableSequence[google.apps.chat_v1.types.Space]): + List of spaces in the requested (or first) + page. + next_page_token (str): + A token that you can send as ``pageToken`` to retrieve the + next page of results. If empty, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + spaces: MutableSequence["Space"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Space", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + class UpdateSpaceRequest(proto.Message): r"""A request to update a single space. 
diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_async.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_async.py new file mode 100644 index 000000000000..1d3156492740 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FindGroupChats +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_FindGroupChats_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +async def sample_find_group_chats(): + # Create a client + client = chat_v1.ChatServiceAsyncClient() + + # Initialize request argument(s) + request = chat_v1.FindGroupChatsRequest() + + # Make the request + page_result = client.find_group_chats(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_FindGroupChats_async] diff --git a/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_sync.py b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_sync.py new file mode 100644 index 000000000000..4afeef5c48d1 --- /dev/null +++ b/packages/google-apps-chat/samples/generated_samples/chat_v1_generated_chat_service_find_group_chats_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for FindGroupChats +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-apps-chat + + +# [START chat_v1_generated_ChatService_FindGroupChats_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.apps import chat_v1 + + +def sample_find_group_chats(): + # Create a client + client = chat_v1.ChatServiceClient() + + # Initialize request argument(s) + request = chat_v1.FindGroupChatsRequest() + + # Make the request + page_result = client.find_group_chats(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END chat_v1_generated_ChatService_FindGroupChats_sync] diff --git a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json index ed3799abf3ed..f24b69292dc6 100644 --- a/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json +++ b/packages/google-apps-chat/samples/generated_samples/snippet_metadata_google.chat.v1.json @@ -2259,6 +2259,159 @@ ], "title": "chat_v1_generated_chat_service_find_direct_message_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient", + "shortName": "ChatServiceAsyncClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceAsyncClient.find_group_chats", + "method": { + "fullName": "google.chat.v1.ChatService.FindGroupChats", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "FindGroupChats" + }, + 
"parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.FindGroupChatsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.FindGroupChatsAsyncPager", + "shortName": "find_group_chats" + }, + "description": "Sample for FindGroupChats", + "file": "chat_v1_generated_chat_service_find_group_chats_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_FindGroupChats_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_find_group_chats_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.apps.chat_v1.ChatServiceClient", + "shortName": "ChatServiceClient" + }, + "fullName": "google.apps.chat_v1.ChatServiceClient.find_group_chats", + "method": { + "fullName": "google.chat.v1.ChatService.FindGroupChats", + "service": { + "fullName": "google.chat.v1.ChatService", + "shortName": "ChatService" + }, + "shortName": "FindGroupChats" + }, + "parameters": [ + { + "name": "request", + "type": "google.apps.chat_v1.types.FindGroupChatsRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.apps.chat_v1.services.chat_service.pagers.FindGroupChatsPager", + 
"shortName": "find_group_chats" + }, + "description": "Sample for FindGroupChats", + "file": "chat_v1_generated_chat_service_find_group_chats_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chat_v1_generated_ChatService_FindGroupChats_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 47, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 48, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chat_v1_generated_chat_service_find_group_chats_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py index 95ed14043d74..b2f62ba60257 100644 --- a/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py +++ b/packages/google-apps-chat/tests/unit/gapic/chat_v1/test_chat_service.py @@ -7549,6 +7549,379 @@ async def test_find_direct_message_async_from_dict(): await test_find_direct_message_async(request_type=dict) +@pytest.mark.parametrize( + "request_type", + [ + space.FindGroupChatsRequest, + dict, + ], +) +def test_find_group_chats(request_type, transport: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = space.FindGroupChatsResponse( + next_page_token="next_page_token_value", + ) + response = client.find_group_chats(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = space.FindGroupChatsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FindGroupChatsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_find_group_chats_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = space.FindGroupChatsRequest( + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.find_group_chats(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == space.FindGroupChatsRequest( + page_token="page_token_value", + ) + + +def test_find_group_chats_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.find_group_chats in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.find_group_chats] = ( + mock_rpc + ) + request = {} + client.find_group_chats(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.find_group_chats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_find_group_chats_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.find_group_chats + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.find_group_chats + ] = mock_rpc + + request = {} + await client.find_group_chats(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.find_group_chats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_find_group_chats_async( + transport: str = "grpc_asyncio", request_type=space.FindGroupChatsRequest +): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.FindGroupChatsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.find_group_chats(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = space.FindGroupChatsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FindGroupChatsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_find_group_chats_async_from_dict(): + await test_find_group_chats_async(request_type=dict) + + +def test_find_group_chats_pager(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.FindGroupChatsResponse( + spaces=[], + next_page_token="def", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + pager = client.find_group_chats(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + +def test_find_group_chats_pages(transport_name: str = "grpc"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.FindGroupChatsResponse( + spaces=[], + next_page_token="def", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = list(client.find_group_chats(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_find_group_chats_async_pager(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.find_group_chats), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.FindGroupChatsResponse( + spaces=[], + next_page_token="def", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + async_pager = await client.find_group_chats( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, space.Space) for i in responses) + + +@pytest.mark.asyncio +async def test_find_group_chats_async_pages(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.find_group_chats), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.FindGroupChatsResponse( + spaces=[], + next_page_token="def", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.find_group_chats(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + @pytest.mark.parametrize( "request_type", [ @@ -19556,6 +19929,105 @@ def test_find_direct_message_rest_unset_required_fields(): assert set(unset_fields) == (set(("name",)) & set(("name",))) +def test_find_group_chats_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.find_group_chats in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.find_group_chats] = ( + mock_rpc + ) + + request = {} + client.find_group_chats(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.find_group_chats(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_find_group_chats_rest_pager(transport: str = "rest"): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + space.Space(), + ], + next_page_token="abc", + ), + space.FindGroupChatsResponse( + spaces=[], + next_page_token="def", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + ], + next_page_token="ghi", + ), + space.FindGroupChatsResponse( + spaces=[ + space.Space(), + space.Space(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(space.FindGroupChatsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {} + + pager = client.find_group_chats(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, space.Space) for i in results) + + pages = 
list(client.find_group_chats(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + def test_create_membership_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -24635,6 +25107,27 @@ def test_find_direct_message_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_find_group_chats_empty_call_grpc(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + call.return_value = space.FindGroupChatsResponse() + client.find_group_chats(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.FindGroupChatsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. def test_create_membership_empty_call_grpc(): @@ -25749,6 +26242,33 @@ async def test_find_direct_message_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_find_group_chats_empty_call_grpc_asyncio(): + client = ChatServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + space.FindGroupChatsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.find_group_chats(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.FindGroupChatsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @pytest.mark.asyncio @@ -30205,6 +30725,134 @@ def test_find_direct_message_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_find_group_chats_rest_bad_request(request_type=space.FindGroupChatsRequest): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.find_group_chats(request) + + +@pytest.mark.parametrize( + "request_type", + [ + space.FindGroupChatsRequest, + dict, + ], +) +def test_find_group_chats_rest_call_success(request_type): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = space.FindGroupChatsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = space.FindGroupChatsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.find_group_chats(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.FindGroupChatsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_find_group_chats_rest_interceptors(null_interceptor): + transport = transports.ChatServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.ChatServiceRestInterceptor(), + ) + client = ChatServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_find_group_chats" + ) as post, + mock.patch.object( + transports.ChatServiceRestInterceptor, "post_find_group_chats_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ChatServiceRestInterceptor, "pre_find_group_chats" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = space.FindGroupChatsRequest.pb(space.FindGroupChatsRequest()) + transcode.return_value = { + "method": "post", + "uri": 
"my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = space.FindGroupChatsResponse.to_json( + space.FindGroupChatsResponse() + ) + req.return_value.content = return_value + + request = space.FindGroupChatsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = space.FindGroupChatsResponse() + post_with_metadata.return_value = space.FindGroupChatsResponse(), metadata + + client.find_group_chats( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_create_membership_rest_bad_request( request_type=gc_membership.CreateMembershipRequest, ): @@ -34357,6 +35005,26 @@ def test_find_direct_message_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_find_group_chats_empty_call_rest(): + client = ChatServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.find_group_chats), "__call__") as call: + client.find_group_chats(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = space.FindGroupChatsRequest() + + assert args[0] == request_msg + + # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
def test_create_membership_empty_call_rest(): @@ -34918,6 +35586,7 @@ def test_chat_service_base_transport(): "delete_space", "complete_import_space", "find_direct_message", + "find_group_chats", "create_membership", "update_membership", "delete_membership", @@ -35396,6 +36065,9 @@ def test_chat_service_client_transport_session_collision(transport_name): session1 = client1.transport.find_direct_message._session session2 = client2.transport.find_direct_message._session assert session1 != session2 + session1 = client1.transport.find_group_chats._session + session2 = client2.transport.find_group_chats._session + assert session1 != session2 session1 = client1.transport.create_membership._session session2 = client2.transport.create_membership._session assert session1 != session2 diff --git a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py index 9829cd7325eb..1d7a98ed74e6 100644 --- a/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py +++ b/packages/google-cloud-bigquery-reservation/google/cloud/bigquery_reservation_v1/types/reservation.py @@ -345,7 +345,7 @@ class ScalingMode(proto.Enum): with 200 baseline and 800 idle slots. 2. if there are 500 idle slots available in other reservations, the reservation will scale up to 700 slots - with 200 baseline and 300 idle slots. Please note, in + with 200 baseline and 500 idle slots. Please note, in this mode, the reservation might not be able to scale up to max_slots. @@ -1400,6 +1400,24 @@ class Assignment(proto.Message): scheduling policy specified on the reservation. This feature is not yet generally available. + principal (str): + Optional. Represents the principal for this assignment. If + not empty, jobs run by this principal will utilize the + associated reservation. 
Otherwise, jobs will fall back to + using the reservation assigned to the project, folder, or + organization (in that order). If no reservation is assigned + at any of these levels, on-demand capacity will be used. + + The supported formats are: + + - ``principal://goog/subject/USER_EMAIL_ADDRESS`` for users, + - ``principal://iam.googleapis.com/projects/-/serviceAccounts/SA_EMAIL_ADDRESS`` + for service accounts, + - ``principal://iam.googleapis.com/projects/PROJECT_NUMBER/locations/global/workloadIdentityPools/POOL_ID/subject/SUBJECT_ID`` + for workload identity pool identities. + - The special value ``unknown_or_deleted_user`` represents + principals which cannot be read from the user info + service, for example deleted users. """ class JobType(proto.Enum): @@ -1505,6 +1523,10 @@ class State(proto.Enum): number=11, message="SchedulingPolicy", ) + principal: str = proto.Field( + proto.STRING, + number=12, + ) class CreateAssignmentRequest(proto.Message): diff --git a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py index 89f07427743f..8c6c6b5e5973 100644 --- a/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py +++ b/packages/google-cloud-bigquery-reservation/tests/unit/gapic/bigquery_reservation_v1/test_reservation_service.py @@ -6482,6 +6482,7 @@ def test_create_assignment(request_type, transport: str = "grpc"): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) response = client.create_assignment(request) @@ -6498,6 +6499,7 @@ def test_create_assignment(request_type, transport: str = "grpc"): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING 
assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" def test_create_assignment_non_empty_request_with_auto_populated_field(): @@ -6636,6 +6638,7 @@ async def test_create_assignment_async( job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) response = await client.create_assignment(request) @@ -6653,6 +6656,7 @@ async def test_create_assignment_async( assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" @pytest.mark.asyncio @@ -8811,6 +8815,7 @@ def test_move_assignment(request_type, transport: str = "grpc"): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) response = client.move_assignment(request) @@ -8827,6 +8832,7 @@ def test_move_assignment(request_type, transport: str = "grpc"): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" def test_move_assignment_non_empty_request_with_auto_populated_field(): @@ -8961,6 +8967,7 @@ async def test_move_assignment_async( job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) response = await client.move_assignment(request) @@ -8978,6 +8985,7 @@ async def test_move_assignment_async( assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" 
@pytest.mark.asyncio @@ -9166,6 +9174,7 @@ def test_update_assignment(request_type, transport: str = "grpc"): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) response = client.update_assignment(request) @@ -9182,6 +9191,7 @@ def test_update_assignment(request_type, transport: str = "grpc"): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" def test_update_assignment_non_empty_request_with_auto_populated_field(): @@ -9314,6 +9324,7 @@ async def test_update_assignment_async( job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) response = await client.update_assignment(request) @@ -9331,6 +9342,7 @@ async def test_update_assignment_async( assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" @pytest.mark.asyncio @@ -19127,6 +19139,7 @@ async def test_create_assignment_empty_call_grpc_asyncio(): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) await client.create_assignment(request=None) @@ -19268,6 +19281,7 @@ async def test_move_assignment_empty_call_grpc_asyncio(): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) await client.move_assignment(request=None) @@ -19301,6 +19315,7 @@ async def test_update_assignment_empty_call_grpc_asyncio(): 
job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) ) await client.update_assignment(request=None) @@ -21880,6 +21895,7 @@ def test_create_assignment_rest_call_success(request_type): "state": 1, "enable_gemini_in_bigquery": True, "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, + "principal": "principal_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -21959,6 +21975,7 @@ def get_message_fields(field): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) # Wrap the value into a proper Response obj @@ -21980,6 +21997,7 @@ def get_message_fields(field): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22624,6 +22642,7 @@ def test_move_assignment_rest_call_success(request_type): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) # Wrap the value into a proper Response obj @@ -22645,6 +22664,7 @@ def test_move_assignment_rest_call_success(request_type): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -22767,6 +22787,7 @@ def test_update_assignment_rest_call_success(request_type): "state": 1, 
"enable_gemini_in_bigquery": True, "scheduling_policy": {"concurrency": 1195, "max_slots": 986}, + "principal": "principal_value", } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -22846,6 +22867,7 @@ def get_message_fields(field): job_type=reservation.Assignment.JobType.PIPELINE, state=reservation.Assignment.State.PENDING, enable_gemini_in_bigquery=True, + principal="principal_value", ) # Wrap the value into a proper Response obj @@ -22867,6 +22889,7 @@ def get_message_fields(field): assert response.job_type == reservation.Assignment.JobType.PIPELINE assert response.state == reservation.Assignment.State.PENDING assert response.enable_gemini_in_bigquery is True + assert response.principal == "principal_value" @pytest.mark.parametrize("null_interceptor", [True, False]) diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/__init__.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/__init__.py index 298a0af135e2..e03a85287ef6 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/__init__.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/__init__.py @@ -192,6 +192,11 @@ DeleteEvaluationRunOperationMetadata, DeleteEvaluationRunRequest, DeleteScheduledEvaluationRunRequest, + ExportEvaluationResultsResponse, + ExportEvaluationRunsResponse, + ExportEvaluationsRequest, + ExportEvaluationsResponse, + ExportOptions, GenerateEvaluationOperationMetadata, GenerateEvaluationRequest, GetEvaluationDatasetRequest, @@ -244,6 +249,7 @@ from .types.guardrail import Guardrail from .types.mcp_tool import McpTool from .types.mcp_toolset import McpToolset +from .types.mocks import MockedToolCall from .types.omnichannel import Omnichannel, OmnichannelIntegrationConfig from .types.omnichannel_service import OmnichannelOperationMetadata from .types.open_api_tool import OpenApiTool @@ -262,6 +268,7 @@ GoAway, 
InputAudioConfig, InterruptionSignal, + MockConfig, OutputAudioConfig, RecognitionResult, RunSessionRequest, @@ -478,6 +485,11 @@ def _get_version(dependency_name): "ExecutionType", "ExportAppRequest", "ExportAppResponse", + "ExportEvaluationResultsResponse", + "ExportEvaluationRunsResponse", + "ExportEvaluationsRequest", + "ExportEvaluationsResponse", + "ExportOptions", "ExpressionCondition", "FileContext", "FileSearchTool", @@ -557,6 +569,8 @@ def _get_version(dependency_name): "McpToolset", "Message", "MetricAnalysisSettings", + "MockConfig", + "MockedToolCall", "ModelSettings", "OAuthConfig", "Omnichannel", diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/gapic_metadata.json b/packages/google-cloud-ces/google/cloud/ces_v1beta/gapic_metadata.json index 7d57975e2ee0..dafb8cabd826 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/gapic_metadata.json +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/gapic_metadata.json @@ -844,6 +844,11 @@ "delete_scheduled_evaluation_run" ] }, + "ExportEvaluations": { + "methods": [ + "export_evaluations" + ] + }, "GenerateEvaluation": { "methods": [ "generate_evaluation" @@ -1004,6 +1009,11 @@ "delete_scheduled_evaluation_run" ] }, + "ExportEvaluations": { + "methods": [ + "export_evaluations" + ] + }, "GenerateEvaluation": { "methods": [ "generate_evaluation" @@ -1164,6 +1174,11 @@ "delete_scheduled_evaluation_run" ] }, + "ExportEvaluations": { + "methods": [ + "export_evaluations" + ] + }, "GenerateEvaluation": { "methods": [ "generate_evaluation" diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/async_client.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/async_client.py index 8496fbb8d3e3..03031e819a7f 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/async_client.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/async_client.py @@ -56,6 
+56,7 @@ from google.cloud.ces_v1beta.services.evaluation_service import pagers from google.cloud.ces_v1beta.types import ( + agent_service, app, evaluation, evaluation_service, @@ -4208,6 +4209,135 @@ async def sample_test_persona_voice(): # Done; return the response. return response + async def export_evaluations( + self, + request: Optional[ + Union[evaluation_service.ExportEvaluationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Exports evaluations. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ces_v1beta + + async def sample_export_evaluations(): + # Create a client + client = ces_v1beta.EvaluationServiceAsyncClient() + + # Initialize request argument(s) + request = ces_v1beta.ExportEvaluationsRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + operation = client.export_evaluations(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.ces_v1beta.types.ExportEvaluationsRequest, dict]]): + The request object. Request message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + parent (:class:`str`): + Required. The resource name of the app to export + evaluations from. 
Format: + ``projects/{project}/locations/{location}/apps/{app}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.ces_v1beta.types.ExportEvaluationsResponse` Response message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, evaluation_service.ExportEvaluationsRequest): + request = evaluation_service.ExportEvaluationsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.export_evaluations + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + evaluation_service.ExportEvaluationsResponse, + metadata_type=agent_service.OperationMetadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/client.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/client.py index d68dc97dfd82..a2eeb11c69aa 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/client.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/client.py @@ -73,6 +73,7 @@ from google.cloud.ces_v1beta.services.evaluation_service import pagers from google.cloud.ces_v1beta.types import ( + agent_service, app, evaluation, evaluation_service, @@ -4844,6 +4845,132 @@ def sample_test_persona_voice(): # Done; return the response. return response + def export_evaluations( + self, + request: Optional[ + Union[evaluation_service.ExportEvaluationsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Exports evaluations. 
+ + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import ces_v1beta + + def sample_export_evaluations(): + # Create a client + client = ces_v1beta.EvaluationServiceClient() + + # Initialize request argument(s) + request = ces_v1beta.ExportEvaluationsRequest( + parent="parent_value", + names=['names_value1', 'names_value2'], + ) + + # Make the request + operation = client.export_evaluations(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.ces_v1beta.types.ExportEvaluationsRequest, dict]): + The request object. Request message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + parent (str): + Required. The resource name of the app to export + evaluations from. Format: + ``projects/{project}/locations/{location}/apps/{app}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.cloud.ces_v1beta.types.ExportEvaluationsResponse` Response message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, evaluation_service.ExportEvaluationsRequest): + request = evaluation_service.ExportEvaluationsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.export_evaluations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + evaluation_service.ExportEvaluationsResponse, + metadata_type=agent_service.OperationMetadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "EvaluationServiceClient": return self diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/base.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/base.py index e4a4f01a4f28..6d7a98feba51 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/base.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/base.py @@ -304,6 +304,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_evaluations: gapic_v1.method.wrap_method( + self.export_evaluations, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -671,6 +676,15 @@ def test_persona_voice( ]: raise NotImplementedError() + @property + def export_evaluations( + self, + ) -> Callable[ + [evaluation_service.ExportEvaluationsRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc.py index 9036f70b23db..fd50ebbad95c 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc.py @@ -1249,6 +1249,34 @@ def test_persona_voice( ) return self._stubs["test_persona_voice"] + @property + def export_evaluations( + self, + ) -> Callable[ + [evaluation_service.ExportEvaluationsRequest], operations_pb2.Operation + ]: + r"""Return a callable for the export evaluations method over gRPC. + + Exports evaluations. 
+ + Returns: + Callable[[~.ExportEvaluationsRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "export_evaluations" not in self._stubs: + self._stubs["export_evaluations"] = self._logged_channel.unary_unary( + "/google.cloud.ces.v1beta.EvaluationService/ExportEvaluations", + request_serializer=evaluation_service.ExportEvaluationsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_evaluations"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc_asyncio.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc_asyncio.py index d95e7d7f3f43..0314620e018b 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc_asyncio.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/grpc_asyncio.py @@ -1277,6 +1277,35 @@ def test_persona_voice( ) return self._stubs["test_persona_voice"] + @property + def export_evaluations( + self, + ) -> Callable[ + [evaluation_service.ExportEvaluationsRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the export evaluations method over gRPC. + + Exports evaluations. + + Returns: + Callable[[~.ExportEvaluationsRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "export_evaluations" not in self._stubs: + self._stubs["export_evaluations"] = self._logged_channel.unary_unary( + "/google.cloud.ces.v1beta.EvaluationService/ExportEvaluations", + request_serializer=evaluation_service.ExportEvaluationsRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["export_evaluations"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1435,6 +1464,11 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.export_evaluations: self._wrap_method( + self.export_evaluations, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest.py index a973256e1e2d..eb770a26cc94 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest.py @@ -136,6 +136,14 @@ def pre_delete_scheduled_evaluation_run(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata + def pre_export_evaluations(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_export_evaluations(self, response): + logging.log(f"Received response: {response}") + return response + def pre_generate_evaluation(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -634,6 +642,55 @@ def pre_delete_scheduled_evaluation_run( """ return request, metadata + def pre_export_evaluations( + self, + request: 
evaluation_service.ExportEvaluationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + evaluation_service.ExportEvaluationsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for export_evaluations + + Override in a subclass to manipulate the request or metadata + before they are sent to the EvaluationService server. + """ + return request, metadata + + def post_export_evaluations( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for export_evaluations + + DEPRECATED. Please use the `post_export_evaluations_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the EvaluationService server but before + it is returned to user code. This `post_export_evaluations` interceptor runs + before the `post_export_evaluations_with_metadata` interceptor. + """ + return response + + def post_export_evaluations_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for export_evaluations + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the EvaluationService server but before it is returned to user code. + + We recommend only using this `post_export_evaluations_with_metadata` + interceptor in new development instead of the `post_export_evaluations` interceptor. + When both interceptors are used, this `post_export_evaluations_with_metadata` interceptor runs after the + `post_export_evaluations` interceptor. The (possibly modified) response returned by + `post_export_evaluations` will be passed to + `post_export_evaluations_with_metadata`. 
+ """ + return response, metadata + def pre_generate_evaluation( self, request: evaluation_service.GenerateEvaluationRequest, @@ -3313,6 +3370,159 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) + class _ExportEvaluations( + _BaseEvaluationServiceRestTransport._BaseExportEvaluations, + EvaluationServiceRestStub, + ): + def __hash__(self): + return hash("EvaluationServiceRestTransport.ExportEvaluations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: evaluation_service.ExportEvaluationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the export evaluations method over HTTP. + + Args: + request (~.evaluation_service.ExportEvaluationsRequest): + The request object. Request message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseEvaluationServiceRestTransport._BaseExportEvaluations._get_http_options() + + request, metadata = self._interceptor.pre_export_evaluations( + request, metadata + ) + transcoded_request = _BaseEvaluationServiceRestTransport._BaseExportEvaluations._get_transcoded_request( + http_options, request + ) + + body = _BaseEvaluationServiceRestTransport._BaseExportEvaluations._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseEvaluationServiceRestTransport._BaseExportEvaluations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.ces_v1beta.EvaluationServiceClient.ExportEvaluations", + extra={ + "serviceName": "google.cloud.ces.v1beta.EvaluationService", + "rpcName": "ExportEvaluations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = EvaluationServiceRestTransport._ExportEvaluations._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_evaluations(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_evaluations_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.ces_v1beta.EvaluationServiceClient.export_evaluations", + extra={ + "serviceName": "google.cloud.ces.v1beta.EvaluationService", + "rpcName": "ExportEvaluations", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + class _GenerateEvaluation( _BaseEvaluationServiceRestTransport._BaseGenerateEvaluation, EvaluationServiceRestStub, @@ -6646,6 +6856,16 @@ def delete_scheduled_evaluation_run( self._session, self._host, self._interceptor ) # type: ignore + @property + def export_evaluations( + self, + ) -> Callable[ + [evaluation_service.ExportEvaluationsRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ExportEvaluations(self._session, self._host, self._interceptor) # type: ignore + @property def generate_evaluation( self, diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest_base.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest_base.py index 8a943c7d7c99..dcaeff2df74c 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest_base.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/services/evaluation_service/transports/rest_base.py @@ -609,6 +609,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseExportEvaluations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1beta/{parent=projects/*/locations/*/apps/*}/evaluations:export", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = evaluation_service.ExportEvaluationsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseEvaluationServiceRestTransport._BaseExportEvaluations._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGenerateEvaluation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/__init__.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/__init__.py index 8af16f3ccece..4d90d1c26ce9 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/__init__.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/__init__.py @@ -194,6 +194,11 @@ DeleteEvaluationRunOperationMetadata, DeleteEvaluationRunRequest, DeleteScheduledEvaluationRunRequest, + ExportEvaluationResultsResponse, + ExportEvaluationRunsResponse, + ExportEvaluationsRequest, + ExportEvaluationsResponse, + ExportOptions, GenerateEvaluationOperationMetadata, GenerateEvaluationRequest, GetEvaluationDatasetRequest, @@ -264,6 +269,9 @@ from .mcp_toolset import ( McpToolset, ) +from .mocks import ( + MockedToolCall, +) from .omnichannel import ( Omnichannel, OmnichannelIntegrationConfig, @@ -301,6 +309,7 @@ GoAway, InputAudioConfig, InterruptionSignal, + MockConfig, OutputAudioConfig, RecognitionResult, RunSessionRequest, @@ -484,6 +493,11 @@ "DeleteEvaluationRunOperationMetadata", "DeleteEvaluationRunRequest", "DeleteScheduledEvaluationRunRequest", + "ExportEvaluationResultsResponse", + "ExportEvaluationRunsResponse", + "ExportEvaluationsRequest", + "ExportEvaluationsResponse", + "ExportOptions", "GenerateEvaluationOperationMetadata", "GenerateEvaluationRequest", "GetEvaluationDatasetRequest", @@ -535,6 +549,7 @@ "Guardrail", "McpTool", "McpToolset", + "MockedToolCall", "Omnichannel", "OmnichannelIntegrationConfig", "OmnichannelOperationMetadata", @@ -554,6 +569,7 @@ "GoAway", 
"InputAudioConfig", "InterruptionSignal", + "MockConfig", "OutputAudioConfig", "RecognitionResult", "RunSessionRequest", diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/app.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/app.py index e34cfcd6f430..8c7622f3b854 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/app.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/app.py @@ -719,6 +719,13 @@ class ErrorHandlingSettings(proto.Message): error_handling_strategy (google.cloud.ces_v1beta.types.ErrorHandlingSettings.ErrorHandlingStrategy): Optional. The strategy to use for error handling. + fallback_response_config (google.cloud.ces_v1beta.types.ErrorHandlingSettings.FallbackResponseConfig): + Optional. Configuration for handling fallback + responses. + end_session_config (google.cloud.ces_v1beta.types.ErrorHandlingSettings.EndSessionConfig): + Optional. Configuration for ending the + session in case of system errors (e.g. LLM + errors). """ class ErrorHandlingStrategy(proto.Enum): @@ -742,11 +749,71 @@ class ErrorHandlingStrategy(proto.Enum): FALLBACK_RESPONSE = 2 END_SESSION = 3 + class FallbackResponseConfig(proto.Message): + r"""Configuration for handling fallback responses. + + Attributes: + custom_fallback_messages (MutableMapping[str, str]): + Optional. The fallback messages in case of system errors + (e.g. LLM errors), mapped by `supported language + code `__. + max_fallback_attempts (int): + Optional. The maximum number of fallback attempts to make + before the agent emitting + [EndSession][google.cloud.ces.v1beta.EndSession] Signal. + """ + + custom_fallback_messages: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=1, + ) + max_fallback_attempts: int = proto.Field( + proto.INT32, + number=2, + ) + + class EndSessionConfig(proto.Message): + r"""Configuration for ending the session in case of system errors + (e.g. LLM errors). + + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + escalate_session (bool): + Optional. Whether to escalate the session in + [EndSession][google.cloud.ces.v1beta.EndSession]. If session + is escalated, [metadata in + EndSession][google.cloud.ces.v1beta.EndSession.metadata] + will contain ``session_escalated = true``. See + https://docs.cloud.google.com/customer-engagement-ai/conversational-agents/ps/deploy/google-telephony-platform#transfer_a_call_to_a_human_agent + for details. + + This field is a member of `oneof`_ ``_escalate_session``. + """ + + escalate_session: bool = proto.Field( + proto.BOOL, + number=1, + optional=True, + ) + error_handling_strategy: ErrorHandlingStrategy = proto.Field( proto.ENUM, number=1, enum=ErrorHandlingStrategy, ) + fallback_response_config: FallbackResponseConfig = proto.Field( + proto.MESSAGE, + number=2, + message=FallbackResponseConfig, + ) + end_session_config: EndSessionConfig = proto.Field( + proto.MESSAGE, + number=3, + message=EndSessionConfig, + ) class EvaluationMetricsThresholds(proto.Message): @@ -1062,12 +1129,21 @@ class ConversationLoggingSettings(proto.Message): disable_conversation_logging (bool): Optional. Whether to disable conversation logging for the sessions. + retention_window (google.protobuf.duration_pb2.Duration): + Optional. Controls the retention window for + the conversation. If not set, the conversation + will be retained for 365 days. 
""" disable_conversation_logging: bool = proto.Field( proto.BOOL, number=1, ) + retention_window: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) class CloudLoggingSettings(proto.Message): diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation.py index 30d0a3f2876d..c06a437dce47 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation.py @@ -1039,11 +1039,14 @@ class Outcome(proto.Enum): Evaluation/Expectation failed. In the case of an evaluation, this means that at least one expectation was not met. + SKIPPED (3): + Evaluation/Expectation was skipped. """ OUTCOME_UNSPECIFIED = 0 PASS = 1 FAIL = 2 + SKIPPED = 3 class ExecutionState(proto.Enum): r"""The state of the evaluation result execution. diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation_service.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation_service.py index c0f1886a3c8f..7bc17db3f93f 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation_service.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/evaluation_service.py @@ -72,6 +72,11 @@ "GetEvaluationExpectationRequest", "ListEvaluationExpectationsRequest", "ListEvaluationExpectationsResponse", + "ExportOptions", + "ExportEvaluationsRequest", + "ExportEvaluationsResponse", + "ExportEvaluationResultsResponse", + "ExportEvaluationRunsResponse", }, ) @@ -349,12 +354,27 @@ class ImportEvaluationsResponse(proto.Message): evaluations (MutableSequence[google.cloud.ces_v1beta.types.Evaluation]): The list of evaluations that were imported into the app. + evaluation_results (MutableSequence[google.cloud.ces_v1beta.types.EvaluationResult]): + The list of evaluation results that were + imported into the app. 
+ evaluation_runs (MutableSequence[google.cloud.ces_v1beta.types.EvaluationRun]): + The list of evaluation runs that were + imported into the app. error_messages (MutableSequence[str]): Optional. A list of error messages associated with evaluations that failed to be imported. import_failure_count (int): - The number of evaluations that were not - imported due to errors. + The number of evaluations that either failed + to import entirely or completed import with one + or more errors. + evaluation_result_import_failure_count (int): + The number of evaluation results that either + failed to import entirely or completed import + with one or more errors. + evaluation_run_import_failure_count (int): + The number of evaluation runs that either + failed to import entirely or completed import + with one or more errors. """ evaluations: MutableSequence[gcc_evaluation.Evaluation] = proto.RepeatedField( @@ -362,6 +382,20 @@ class ImportEvaluationsResponse(proto.Message): number=1, message=gcc_evaluation.Evaluation, ) + evaluation_results: MutableSequence[gcc_evaluation.EvaluationResult] = ( + proto.RepeatedField( + proto.MESSAGE, + number=4, + message=gcc_evaluation.EvaluationResult, + ) + ) + evaluation_runs: MutableSequence[gcc_evaluation.EvaluationRun] = ( + proto.RepeatedField( + proto.MESSAGE, + number=5, + message=gcc_evaluation.EvaluationRun, + ) + ) error_messages: MutableSequence[str] = proto.RepeatedField( proto.STRING, number=2, @@ -370,6 +404,14 @@ class ImportEvaluationsResponse(proto.Message): proto.INT32, number=3, ) + evaluation_result_import_failure_count: int = proto.Field( + proto.INT32, + number=6, + ) + evaluation_run_import_failure_count: int = proto.Field( + proto.INT32, + number=7, + ) class ImportEvaluationsOperationMetadata(proto.Message): @@ -1501,4 +1543,212 @@ def raw_page(self): ) +class ExportOptions(proto.Message): + r"""Options for exporting CES evaluation resources. 
+ + Attributes: + export_format (google.cloud.ces_v1beta.types.ExportOptions.ExportFormat): + Optional. The format to export the evaluation + results in. Defaults to JSON if not specified. + gcs_uri (str): + Optional. The Google Cloud Storage URI to + write the exported Evaluation Results to. + """ + + class ExportFormat(proto.Enum): + r"""The format to export the items in. Defaults to JSON if not + specified. + + Values: + EXPORT_FORMAT_UNSPECIFIED (0): + Unspecified format. + JSON (1): + JSON format. + YAML (2): + YAML format. + """ + + EXPORT_FORMAT_UNSPECIFIED = 0 + JSON = 1 + YAML = 2 + + export_format: ExportFormat = proto.Field( + proto.ENUM, + number=1, + enum=ExportFormat, + ) + gcs_uri: str = proto.Field( + proto.STRING, + number=2, + ) + + +class ExportEvaluationsRequest(proto.Message): + r"""Request message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + + Attributes: + parent (str): + Required. The resource name of the app to export evaluations + from. Format: + ``projects/{project}/locations/{location}/apps/{app}`` + names (MutableSequence[str]): + Required. The resource names of the + evaluations to export. + export_options (google.cloud.ces_v1beta.types.ExportOptions): + Optional. The export options for the + evaluations. + include_evaluation_results (bool): + Optional. Includes evaluation results in the export. At + least one of include_evaluation_results or + include_evaluations must be set. + include_evaluations (bool): + Optional. Includes evaluations in the export. At least one + of include_evaluation_results or include_evaluations must be + set. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + names: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + export_options: "ExportOptions" = proto.Field( + proto.MESSAGE, + number=3, + message="ExportOptions", + ) + include_evaluation_results: bool = proto.Field( + proto.BOOL, + number=4, + ) + include_evaluations: bool = proto.Field( + proto.BOOL, + number=5, + ) + + +class ExportEvaluationsResponse(proto.Message): + r"""Response message for + [EvaluationService.ExportEvaluations][google.cloud.ces.v1beta.EvaluationService.ExportEvaluations]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + evaluations_content (bytes): + The content of the exported Evaluations. This will be + populated if gcs_uri was not specified in the request. + + This field is a member of `oneof`_ ``evaluations``. + evaluations_uri (str): + The Google Cloud Storage URI folder where the exported + evaluations were written. This will be populated if gcs_uri + was specified in the request. + + This field is a member of `oneof`_ ``evaluations``. + failed_evaluations (MutableMapping[str, str]): + Output only. A map of evaluation resource + names that could not be exported, to the reason + why they failed. 
+ """ + + evaluations_content: bytes = proto.Field( + proto.BYTES, + number=1, + oneof="evaluations", + ) + evaluations_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="evaluations", + ) + failed_evaluations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class ExportEvaluationResultsResponse(proto.Message): + r"""Response message for + [EvaluationService.ExportEvaluationResults][google.cloud.ces.v1beta.EvaluationService.ExportEvaluationResults]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + evaluation_results_content (bytes): + The content of the exported Evaluation Results. This will be + populated if gcs_uri was not specified in the request. + + This field is a member of `oneof`_ ``evaluation_results``. + evaluation_results_uri (str): + The Google Cloud Storage URI folder where the exported + Evaluation Results were written. This will be populated if + gcs_uri was specified in the request. + + This field is a member of `oneof`_ ``evaluation_results``. + """ + + evaluation_results_content: bytes = proto.Field( + proto.BYTES, + number=1, + oneof="evaluation_results", + ) + evaluation_results_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="evaluation_results", + ) + + +class ExportEvaluationRunsResponse(proto.Message): + r"""Response message for + [EvaluationService.ExportEvaluationRuns][google.cloud.ces.v1beta.EvaluationService.ExportEvaluationRuns]. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + evaluation_runs_content (bytes): + The content of the exported Evaluation Runs. This will be + populated if gcs_uri was not specified in the request. + + This field is a member of `oneof`_ ``evaluation_runs``. + evaluation_runs_uri (str): + The Google Cloud Storage URI folder where the exported + Evaluation Runs were written. This will be populated if + gcs_uri was specified in the request. + + This field is a member of `oneof`_ ``evaluation_runs``. + """ + + evaluation_runs_content: bytes = proto.Field( + proto.BYTES, + number=1, + oneof="evaluation_runs", + ) + evaluation_runs_uri: str = proto.Field( + proto.STRING, + number=2, + oneof="evaluation_runs", + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/mocks.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/mocks.py new file mode 100644 index 000000000000..24eefe24ad35 --- /dev/null +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/mocks.py @@ -0,0 +1,97 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import google.protobuf.struct_pb2 as struct_pb2 # type: ignore +import proto # type: ignore + +from google.cloud.ces_v1beta.types import toolset_tool + +__protobuf__ = proto.module( + package="google.cloud.ces.v1beta", + manifest={ + "MockedToolCall", + }, +) + + +class MockedToolCall(proto.Message): + r"""A mocked tool call. + + Expresses the target tool + a pattern to match against that + tool's args / inputs. If the pattern matches, then the mock + response will be returned. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + tool_id (str): + Optional. The name of the tool to mock. Format: + ``projects/{project}/locations/{location}/apps/{app}/tools/{tool}`` + + This field is a member of `oneof`_ ``tool_identifier``. + toolset (google.cloud.ces_v1beta.types.ToolsetTool): + Optional. The toolset to mock. + + This field is a member of `oneof`_ ``tool_identifier``. + tool (str): + Optional. Deprecated. Use tool_identifier instead. + expected_args_pattern (google.protobuf.struct_pb2.Struct): + Required. A pattern to match against the args + / inputs of all dispatched tool calls. If the + tool call inputs match this pattern, then mock + output will be returned. + mock_response (google.protobuf.struct_pb2.Struct): + Optional. The mock response / output to + return if the tool call args / inputs match the + pattern. 
+ """ + + tool_id: str = proto.Field( + proto.STRING, + number=4, + oneof="tool_identifier", + ) + toolset: toolset_tool.ToolsetTool = proto.Field( + proto.MESSAGE, + number=5, + oneof="tool_identifier", + message=toolset_tool.ToolsetTool, + ) + tool: str = proto.Field( + proto.STRING, + number=1, + ) + expected_args_pattern: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=2, + message=struct_pb2.Struct, + ) + mock_response: struct_pb2.Struct = proto.Field( + proto.MESSAGE, + number=3, + message=struct_pb2.Struct, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/session_service.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/session_service.py index cafb2af9b69a..5f9c86f1aaa1 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/session_service.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/session_service.py @@ -20,12 +20,13 @@ import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import proto # type: ignore -from google.cloud.ces_v1beta.types import common, example, search_suggestions +from google.cloud.ces_v1beta.types import common, example, mocks, search_suggestions __protobuf__ = proto.module( package="google.cloud.ces.v1beta", manifest={ "AudioEncoding", + "MockConfig", "InputAudioConfig", "OutputAudioConfig", "SessionConfig", @@ -69,6 +70,49 @@ class AudioEncoding(proto.Enum): ALAW = 3 +class MockConfig(proto.Message): + r"""Mock tool calls configuration for the session. + + Attributes: + mocked_tool_calls (MutableSequence[google.cloud.ces_v1beta.types.MockedToolCall]): + Optional. All tool calls to mock for the + duration of the session. + unmatched_tool_call_behavior (google.cloud.ces_v1beta.types.MockConfig.UnmatchedToolCallBehavior): + Required. Behavior for tool calls that don't match any args + patterns in mocked_tool_calls.
+ """ + + class UnmatchedToolCallBehavior(proto.Enum): + r"""What to do when a tool call doesn't match any mocked tool + calls. + + Values: + UNMATCHED_TOOL_CALL_BEHAVIOR_UNSPECIFIED (0): + Default value. This value is unused. + FAIL (1): + Throw an error for any tool calls that don't + match a mock expected input pattern. + PASS_THROUGH (2): + For unmatched tool calls, pass the tool call + through to real tool. + """ + + UNMATCHED_TOOL_CALL_BEHAVIOR_UNSPECIFIED = 0 + FAIL = 1 + PASS_THROUGH = 2 + + mocked_tool_calls: MutableSequence[mocks.MockedToolCall] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=mocks.MockedToolCall, + ) + unmatched_tool_call_behavior: UnmatchedToolCallBehavior = proto.Field( + proto.ENUM, + number=2, + enum=UnmatchedToolCallBehavior, + ) + + class InputAudioConfig(proto.Message): r"""InputAudioConfig configures how the CES agent should interpret the incoming audio data. diff --git a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/tool_service.py b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/tool_service.py index 78ca9fdd218b..7a39c75c02ca 100644 --- a/packages/google-cloud-ces/google/cloud/ces_v1beta/types/tool_service.py +++ b/packages/google-cloud-ces/google/cloud/ces_v1beta/types/tool_service.py @@ -20,7 +20,7 @@ import google.protobuf.struct_pb2 as struct_pb2 # type: ignore import proto # type: ignore -from google.cloud.ces_v1beta.types import schema +from google.cloud.ces_v1beta.types import schema, session_service from google.cloud.ces_v1beta.types import tool as gcc_tool from google.cloud.ces_v1beta.types import toolset_tool as gcc_toolset_tool @@ -80,6 +80,11 @@ class ExecuteToolRequest(proto.Message): args (google.protobuf.struct_pb2.Struct): Optional. The input parameters and values for the tool in JSON object format. + mock_config (google.cloud.ces_v1beta.types.MockConfig): + Optional. Mock configuration for the tool + execution. 
If this field is set, tools that call + other tools will be mocked based on the provided + patterns and responses. """ tool: str = proto.Field( @@ -114,6 +119,11 @@ class ExecuteToolRequest(proto.Message): number=2, message=struct_pb2.Struct, ) + mock_config: session_service.MockConfig = proto.Field( + proto.MESSAGE, + number=7, + message=session_service.MockConfig, + ) class ExecuteToolResponse(proto.Message): diff --git a/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_async.py b/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_async.py new file mode 100644 index 000000000000..07982f2e463b --- /dev/null +++ b/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportEvaluations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-ces + + +# [START ces_v1beta_generated_EvaluationService_ExportEvaluations_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import ces_v1beta + + +async def sample_export_evaluations(): + # Create a client + client = ces_v1beta.EvaluationServiceAsyncClient() + + # Initialize request argument(s) + request = ces_v1beta.ExportEvaluationsRequest( + parent="parent_value", + names=["names_value1", "names_value2"], + ) + + # Make the request + operation = client.export_evaluations(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END ces_v1beta_generated_EvaluationService_ExportEvaluations_async] diff --git a/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_sync.py b/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_sync.py new file mode 100644 index 000000000000..3e96686df95a --- /dev/null +++ b/packages/google-cloud-ces/samples/generated_samples/ces_v1beta_generated_evaluation_service_export_evaluations_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ExportEvaluations +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-ces + + +# [START ces_v1beta_generated_EvaluationService_ExportEvaluations_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import ces_v1beta + + +def sample_export_evaluations(): + # Create a client + client = ces_v1beta.EvaluationServiceClient() + + # Initialize request argument(s) + request = ces_v1beta.ExportEvaluationsRequest( + parent="parent_value", + names=["names_value1", "names_value2"], + ) + + # Make the request + operation = client.export_evaluations(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END ces_v1beta_generated_EvaluationService_ExportEvaluations_sync] diff --git a/packages/google-cloud-ces/samples/generated_samples/snippet_metadata_google.cloud.ces.v1beta.json b/packages/google-cloud-ces/samples/generated_samples/snippet_metadata_google.cloud.ces.v1beta.json index 8088e1f8d4be..672fc166bad1 100644 --- a/packages/google-cloud-ces/samples/generated_samples/snippet_metadata_google.cloud.ces.v1beta.json +++ b/packages/google-cloud-ces/samples/generated_samples/snippet_metadata_google.cloud.ces.v1beta.json @@ -10026,6 +10026,167 @@ ], "title": "ces_v1beta_generated_evaluation_service_delete_scheduled_evaluation_run_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.ces_v1beta.EvaluationServiceAsyncClient", + "shortName": "EvaluationServiceAsyncClient" + }, + "fullName": "google.cloud.ces_v1beta.EvaluationServiceAsyncClient.export_evaluations", + "method": { + "fullName": "google.cloud.ces.v1beta.EvaluationService.ExportEvaluations", + "service": { + "fullName": "google.cloud.ces.v1beta.EvaluationService", + "shortName": "EvaluationService" + }, + "shortName": "ExportEvaluations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.ces_v1beta.types.ExportEvaluationsRequest" + }, + { + "name": 
"parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "export_evaluations" + }, + "description": "Sample for ExportEvaluations", + "file": "ces_v1beta_generated_evaluation_service_export_evaluations_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "ces_v1beta_generated_EvaluationService_ExportEvaluations_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "ces_v1beta_generated_evaluation_service_export_evaluations_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.ces_v1beta.EvaluationServiceClient", + "shortName": "EvaluationServiceClient" + }, + "fullName": "google.cloud.ces_v1beta.EvaluationServiceClient.export_evaluations", + "method": { + "fullName": "google.cloud.ces.v1beta.EvaluationService.ExportEvaluations", + "service": { + "fullName": "google.cloud.ces.v1beta.EvaluationService", + "shortName": "EvaluationService" + }, + "shortName": "ExportEvaluations" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.ces_v1beta.types.ExportEvaluationsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": 
"google.api_core.operation.Operation", + "shortName": "export_evaluations" + }, + "description": "Sample for ExportEvaluations", + "file": "ces_v1beta_generated_evaluation_service_export_evaluations_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "ces_v1beta_generated_EvaluationService_ExportEvaluations_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "ces_v1beta_generated_evaluation_service_export_evaluations_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_agent_service.py b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_agent_service.py index e4bd34ce7d2b..23c76c80acc9 100644 --- a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_agent_service.py +++ b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_agent_service.py @@ -34090,11 +34090,21 @@ def test_create_app_rest_call_success(request_type): "dataset": "dataset_value", }, "cloud_logging_settings": {"enable_cloud_logging": True}, - "conversation_logging_settings": {"disable_conversation_logging": True}, + "conversation_logging_settings": { + "disable_conversation_logging": True, + "retention_window": {}, + }, "evaluation_audio_recording_config": {}, "metric_analysis_settings": {"llm_metrics_opted_out": True}, }, - "error_handling_settings": {"error_handling_strategy": 1}, + "error_handling_settings": { + "error_handling_strategy": 1, + "fallback_response_config": { + "custom_fallback_messages": {}, + "max_fallback_attempts": 2214, + }, + "end_session_config": {"escalate_session": True}, + }, 
"model_settings": {"model": "model_value", "temperature": 0.1198}, "tool_execution_mode": 1, "evaluation_metrics_thresholds": { @@ -34435,11 +34445,21 @@ def test_update_app_rest_call_success(request_type): "dataset": "dataset_value", }, "cloud_logging_settings": {"enable_cloud_logging": True}, - "conversation_logging_settings": {"disable_conversation_logging": True}, + "conversation_logging_settings": { + "disable_conversation_logging": True, + "retention_window": {}, + }, "evaluation_audio_recording_config": {}, "metric_analysis_settings": {"llm_metrics_opted_out": True}, }, - "error_handling_settings": {"error_handling_strategy": 1}, + "error_handling_settings": { + "error_handling_strategy": 1, + "fallback_response_config": { + "custom_fallback_messages": {}, + "max_fallback_attempts": 2214, + }, + "end_session_config": {"escalate_session": True}, + }, "model_settings": {"model": "model_value", "temperature": 0.1198}, "tool_execution_mode": 1, "evaluation_metrics_thresholds": { @@ -42157,12 +42177,20 @@ def test_create_app_version_rest_call_success(request_type): }, "cloud_logging_settings": {"enable_cloud_logging": True}, "conversation_logging_settings": { - "disable_conversation_logging": True + "disable_conversation_logging": True, + "retention_window": {}, }, "evaluation_audio_recording_config": {}, "metric_analysis_settings": {"llm_metrics_opted_out": True}, }, - "error_handling_settings": {"error_handling_strategy": 1}, + "error_handling_settings": { + "error_handling_strategy": 1, + "fallback_response_config": { + "custom_fallback_messages": {}, + "max_fallback_attempts": 2214, + }, + "end_session_config": {"escalate_session": True}, + }, "model_settings": {"model": "model_value", "temperature": 0.1198}, "tool_execution_mode": 1, "evaluation_metrics_thresholds": { diff --git a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_evaluation_service.py b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_evaluation_service.py index 
43a5a98d0414..a8f9a0c4215c 100644 --- a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_evaluation_service.py +++ b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_evaluation_service.py @@ -77,6 +77,7 @@ transports, ) from google.cloud.ces_v1beta.types import ( + agent_service, app, common, evaluation, @@ -13922,6 +13923,352 @@ async def test_test_persona_voice_flattened_error_async(): ) +@pytest.mark.parametrize( + "request_type", + [ + evaluation_service.ExportEvaluationsRequest, + dict, + ], +) +def test_export_evaluations(request_type, transport: str = "grpc"): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = evaluation_service.ExportEvaluationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_export_evaluations_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = evaluation_service.ExportEvaluationsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.export_evaluations(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == evaluation_service.ExportEvaluationsRequest( + parent="parent_value", + ) + + +def test_export_evaluations_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.export_evaluations in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_evaluations] = ( + mock_rpc + ) + request = {} + client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_evaluations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_evaluations_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.export_evaluations + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.export_evaluations + ] = mock_rpc + + request = {} + await client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.export_evaluations(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_export_evaluations_async( + transport: str = "grpc_asyncio", + request_type=evaluation_service.ExportEvaluationsRequest, +): + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = evaluation_service.ExportEvaluationsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_export_evaluations_async_from_dict(): + await test_export_evaluations_async(request_type=dict) + + +def test_export_evaluations_field_headers(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = evaluation_service.ExportEvaluationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_export_evaluations_field_headers_async(): + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = evaluation_service.ExportEvaluationsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.export_evaluations(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_export_evaluations_flattened(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.export_evaluations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_export_evaluations_flattened_error(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.export_evaluations( + evaluation_service.ExportEvaluationsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_export_evaluations_flattened_async(): + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.export_evaluations( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_export_evaluations_flattened_error_async(): + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.export_evaluations( + evaluation_service.ExportEvaluationsRequest(), + parent="parent_value", + ) + + def test_run_evaluation_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -20064,7 +20411,207 @@ def test_delete_scheduled_evaluation_run_rest_unset_required_fields(): assert set(unset_fields) == (set(("etag",)) & set(("name",))) -def test_delete_scheduled_evaluation_run_rest_flattened(): +def test_delete_scheduled_evaluation_run_rest_flattened(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/apps/sample3/scheduledEvaluationRuns/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_scheduled_evaluation_run(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1beta/{name=projects/*/locations/*/apps/*/scheduledEvaluationRuns/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_scheduled_evaluation_run_rest_flattened_error(transport: str = "rest"): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_scheduled_evaluation_run( + evaluation_service.DeleteScheduledEvaluationRunRequest(), + name="name_value", + ) + + +def test_test_persona_voice_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.test_persona_voice in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.test_persona_voice] = ( + mock_rpc + ) + + request = {} + client.test_persona_voice(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.test_persona_voice(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_test_persona_voice_rest_required_fields( + request_type=evaluation_service.TestPersonaVoiceRequest, +): + transport_class = transports.EvaluationServiceRestTransport + + request_init = {} + request_init["app"] = "" + request_init["persona_id"] = "" + request_init["text"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_persona_voice._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["app"] = "app_value" + jsonified_request["personaId"] = "persona_id_value" + jsonified_request["text"] = "text_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).test_persona_voice._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "app" in jsonified_request + assert jsonified_request["app"] == "app_value" + assert "personaId" in jsonified_request + assert jsonified_request["personaId"] == "persona_id_value" + assert "text" in jsonified_request + assert jsonified_request["text"] == "text_value" + + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = evaluation_service.TestPersonaVoiceResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = evaluation_service.TestPersonaVoiceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.test_persona_voice(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_test_persona_voice_rest_unset_required_fields(): + transport = transports.EvaluationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.test_persona_voice._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "app", + "personaId", + "text", + ) + ) + ) + + +def test_test_persona_voice_rest_flattened(): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20073,41 +20620,41 @@ def test_delete_scheduled_evaluation_run_rest_flattened(): # 
Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = evaluation_service.TestPersonaVoiceResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/apps/sample3/scheduledEvaluationRuns/sample4" - } + sample_request = {"app": "projects/sample1/locations/sample2/apps/sample3"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + app="app_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = evaluation_service.TestPersonaVoiceResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_scheduled_evaluation_run(**mock_args) + client.test_persona_voice(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{name=projects/*/locations/*/apps/*/scheduledEvaluationRuns/*}" + "%s/v1beta/{app=projects/*/locations/*/apps/*}:testPersonaVoice" % client.transport._host, args[1], ) -def test_delete_scheduled_evaluation_run_rest_flattened_error(transport: str = "rest"): +def test_test_persona_voice_rest_flattened_error(transport: str = "rest"): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20116,13 +20663,13 @@ def test_delete_scheduled_evaluation_run_rest_flattened_error(transport: str = " # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.delete_scheduled_evaluation_run( - evaluation_service.DeleteScheduledEvaluationRunRequest(), - name="name_value", + client.test_persona_voice( + evaluation_service.TestPersonaVoiceRequest(), + app="app_value", ) -def test_test_persona_voice_rest_use_cached_wrapped_rpc(): +def test_export_evaluations_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -20137,7 +20684,7 @@ def test_test_persona_voice_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.test_persona_voice in client._transport._wrapped_methods + client._transport.export_evaluations in client._transport._wrapped_methods ) # Replace cached wrapped function with mock @@ -20145,32 +20692,35 @@ def test_test_persona_voice_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.test_persona_voice] = ( + client._transport._wrapped_methods[client._transport.export_evaluations] = ( mock_rpc ) request = {} - client.test_persona_voice(request) + client.export_evaluations(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.test_persona_voice(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.export_evaluations(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_test_persona_voice_rest_required_fields( - request_type=evaluation_service.TestPersonaVoiceRequest, +def test_export_evaluations_rest_required_fields( + request_type=evaluation_service.ExportEvaluationsRequest, ): transport_class = transports.EvaluationServiceRestTransport request_init = {} - request_init["app"] = "" - request_init["persona_id"] = "" - request_init["text"] = "" + request_init["parent"] = "" + request_init["names"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -20181,27 +20731,24 @@ def test_test_persona_voice_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).test_persona_voice._get_unset_required_fields(jsonified_request) + ).export_evaluations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["app"] = "app_value" - jsonified_request["personaId"] = "persona_id_value" - jsonified_request["text"] = "text_value" + jsonified_request["parent"] = "parent_value" + jsonified_request["names"] = "names_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).test_persona_voice._get_unset_required_fields(jsonified_request) + ).export_evaluations._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "app" in jsonified_request - assert jsonified_request["app"] == "app_value" - assert "personaId" in jsonified_request - assert jsonified_request["personaId"] == "persona_id_value" - assert "text" in jsonified_request - assert jsonified_request["text"] == "text_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "names" in jsonified_request + assert jsonified_request["names"] == "names_value" client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -20210,7 +20757,7 @@ def test_test_persona_voice_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = evaluation_service.TestPersonaVoiceResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -20230,41 +20777,37 @@ def test_test_persona_voice_rest_required_fields( response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = evaluation_service.TestPersonaVoiceResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.test_persona_voice(request) + response = client.export_evaluations(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_test_persona_voice_rest_unset_required_fields(): +def test_export_evaluations_rest_unset_required_fields(): transport = transports.EvaluationServiceRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.test_persona_voice._get_unset_required_fields({}) + unset_fields = transport.export_evaluations._get_unset_required_fields({}) assert set(unset_fields) == ( set(()) & set( ( - "app", - "personaId", - "text", + "parent", + "names", ) ) ) -def test_test_persona_voice_rest_flattened(): +def test_export_evaluations_rest_flattened(): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -20273,41 +20816,39 @@ def test_test_persona_voice_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = evaluation_service.TestPersonaVoiceResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"app": "projects/sample1/locations/sample2/apps/sample3"} + sample_request = {"parent": "projects/sample1/locations/sample2/apps/sample3"} # get truthy value for each flattened field mock_args = dict( - app="app_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = evaluation_service.TestPersonaVoiceResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.test_persona_voice(**mock_args) + client.export_evaluations(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1beta/{app=projects/*/locations/*/apps/*}:testPersonaVoice" + "%s/v1beta/{parent=projects/*/locations/*/apps/*}/evaluations:export" % client.transport._host, args[1], ) -def test_test_persona_voice_rest_flattened_error(transport: str = "rest"): +def test_export_evaluations_rest_flattened_error(transport: str = "rest"): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -20316,9 +20857,9 @@ def test_test_persona_voice_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.test_persona_voice( - evaluation_service.TestPersonaVoiceRequest(), - app="app_value", + client.export_evaluations( + evaluation_service.ExportEvaluationsRequest(), + parent="parent_value", ) @@ -21135,6 +21676,29 @@ def test_test_persona_voice_empty_call_grpc(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_evaluations_empty_call_grpc(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.export_evaluations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = evaluation_service.ExportEvaluationsRequest() + + assert args[0] == request_msg + + def test_transport_kind_grpc_asyncio(): transport = EvaluationServiceAsyncClient.get_transport_class("grpc_asyncio")( credentials=async_anonymous_credentials() @@ -22112,6 +22676,33 @@ async def test_test_persona_voice_empty_call_grpc_asyncio(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_evaluations_empty_call_grpc_asyncio(): + client = EvaluationServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.export_evaluations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = evaluation_service.ExportEvaluationsRequest() + + assert args[0] == request_msg + + def test_transport_kind_rest(): transport = EvaluationServiceClient.get_transport_class("rest")( credentials=ga_credentials.AnonymousCredentials() @@ -27845,6 +28436,132 @@ def test_test_persona_voice_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_export_evaluations_rest_bad_request( + request_type=evaluation_service.ExportEvaluationsRequest, +): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apps/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.export_evaluations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + evaluation_service.ExportEvaluationsRequest, + dict, + ], +) +def test_export_evaluations_rest_call_success(request_type): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/apps/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.export_evaluations(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_evaluations_rest_interceptors(null_interceptor): + transport = transports.EvaluationServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.EvaluationServiceRestInterceptor(), + ) + client = EvaluationServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.EvaluationServiceRestInterceptor, "post_export_evaluations" + ) as post, + mock.patch.object( + transports.EvaluationServiceRestInterceptor, + "post_export_evaluations_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.EvaluationServiceRestInterceptor, "pre_export_evaluations" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = evaluation_service.ExportEvaluationsRequest.pb( + evaluation_service.ExportEvaluationsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = evaluation_service.ExportEvaluationsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.export_evaluations( + request, + metadata=[ + ("key", "val"), + ("cephalopod", 
"squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28902,6 +29619,28 @@ def test_test_persona_voice_empty_call_rest(): assert args[0] == request_msg +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_evaluations_empty_call_rest(): + client = EvaluationServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_evaluations), "__call__" + ) as call: + client.export_evaluations(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = evaluation_service.ExportEvaluationsRequest() + + assert args[0] == request_msg + + def test_evaluation_service_rest_lro_client(): client = EvaluationServiceClient( credentials=ga_credentials.AnonymousCredentials(), @@ -28983,6 +29722,7 @@ def test_evaluation_service_base_transport(): "update_scheduled_evaluation_run", "delete_scheduled_evaluation_run", "test_persona_voice", + "export_evaluations", "get_location", "list_locations", "get_operation", @@ -29362,6 +30102,9 @@ def test_evaluation_service_client_transport_session_collision(transport_name): session1 = client1.transport.test_persona_voice._session session2 = client2.transport.test_persona_voice._session assert session1 != session2 + session1 = client1.transport.export_evaluations._session + session2 = client2.transport.export_evaluations._session + assert session1 != session2 def test_evaluation_service_grpc_transport_channel(): diff --git 
a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_tool_service.py b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_tool_service.py index 2534769e89e0..abead1849783 100644 --- a/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_tool_service.py +++ b/packages/google-cloud-ces/tests/unit/gapic/ces_v1beta/test_tool_service.py @@ -65,7 +65,14 @@ ToolServiceClient, transports, ) -from google.cloud.ces_v1beta.types import schema, tool, tool_service, toolset_tool +from google.cloud.ces_v1beta.types import ( + mocks, + schema, + session_service, + tool, + tool_service, + toolset_tool, +) CRED_INFO_JSON = { "credential_source": "/path/to/file", diff --git a/packages/google-cloud-chronicle/docs/chronicle_v1/data_table_service.rst b/packages/google-cloud-chronicle/docs/chronicle_v1/data_table_service.rst new file mode 100644 index 000000000000..f6ae74275662 --- /dev/null +++ b/packages/google-cloud-chronicle/docs/chronicle_v1/data_table_service.rst @@ -0,0 +1,10 @@ +DataTableService +---------------------------------- + +.. automodule:: google.cloud.chronicle_v1.services.data_table_service + :members: + :inherited-members: + +.. 
automodule:: google.cloud.chronicle_v1.services.data_table_service.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-chronicle/docs/chronicle_v1/services_.rst b/packages/google-cloud-chronicle/docs/chronicle_v1/services_.rst index 4e20ba183e4e..72d4f29eda92 100644 --- a/packages/google-cloud-chronicle/docs/chronicle_v1/services_.rst +++ b/packages/google-cloud-chronicle/docs/chronicle_v1/services_.rst @@ -4,6 +4,7 @@ Services for Google Cloud Chronicle v1 API :maxdepth: 2 data_access_control_service + data_table_service entity_service instance_service reference_list_service diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle/__init__.py b/packages/google-cloud-chronicle/google/cloud/chronicle/__init__.py index 216dd36dd32e..46a18dc7c4a3 100644 --- a/packages/google-cloud-chronicle/google/cloud/chronicle/__init__.py +++ b/packages/google-cloud-chronicle/google/cloud/chronicle/__init__.py @@ -24,6 +24,12 @@ from google.cloud.chronicle_v1.services.data_access_control_service.client import ( DataAccessControlServiceClient, ) +from google.cloud.chronicle_v1.services.data_table_service.async_client import ( + DataTableServiceAsyncClient, +) +from google.cloud.chronicle_v1.services.data_table_service.client import ( + DataTableServiceClient, +) from google.cloud.chronicle_v1.services.entity_service.async_client import ( EntityServiceAsyncClient, ) @@ -62,6 +68,35 @@ UpdateDataAccessLabelRequest, UpdateDataAccessScopeRequest, ) +from google.cloud.chronicle_v1.types.data_table import ( + BulkCreateDataTableRowsRequest, + BulkCreateDataTableRowsResponse, + BulkGetDataTableRowsRequest, + BulkGetDataTableRowsResponse, + BulkReplaceDataTableRowsRequest, + BulkReplaceDataTableRowsResponse, + BulkUpdateDataTableRowsRequest, + BulkUpdateDataTableRowsResponse, + CreateDataTableRequest, + CreateDataTableRowRequest, + DataTable, + DataTableColumnInfo, + DataTableOperationErrors, + DataTableRow, + DataTableScopeInfo, + DataTableUpdateSource, + 
DeleteDataTableRequest, + DeleteDataTableRowRequest, + GetDataTableOperationErrorsRequest, + GetDataTableRequest, + GetDataTableRowRequest, + ListDataTableRowsRequest, + ListDataTableRowsResponse, + ListDataTablesRequest, + ListDataTablesResponse, + UpdateDataTableRequest, + UpdateDataTableRowRequest, +) from google.cloud.chronicle_v1.types.entity import ( CreateWatchlistRequest, DeleteWatchlistRequest, @@ -119,6 +154,8 @@ __all__ = ( "DataAccessControlServiceClient", "DataAccessControlServiceAsyncClient", + "DataTableServiceClient", + "DataTableServiceAsyncClient", "EntityServiceClient", "EntityServiceAsyncClient", "InstanceServiceClient", @@ -143,6 +180,33 @@ "ListDataAccessScopesResponse", "UpdateDataAccessLabelRequest", "UpdateDataAccessScopeRequest", + "BulkCreateDataTableRowsRequest", + "BulkCreateDataTableRowsResponse", + "BulkGetDataTableRowsRequest", + "BulkGetDataTableRowsResponse", + "BulkReplaceDataTableRowsRequest", + "BulkReplaceDataTableRowsResponse", + "BulkUpdateDataTableRowsRequest", + "BulkUpdateDataTableRowsResponse", + "CreateDataTableRequest", + "CreateDataTableRowRequest", + "DataTable", + "DataTableColumnInfo", + "DataTableOperationErrors", + "DataTableRow", + "DataTableScopeInfo", + "DeleteDataTableRequest", + "DeleteDataTableRowRequest", + "GetDataTableOperationErrorsRequest", + "GetDataTableRequest", + "GetDataTableRowRequest", + "ListDataTableRowsRequest", + "ListDataTableRowsResponse", + "ListDataTablesRequest", + "ListDataTablesResponse", + "UpdateDataTableRequest", + "UpdateDataTableRowRequest", + "DataTableUpdateSource", "CreateWatchlistRequest", "DeleteWatchlistRequest", "GetWatchlistRequest", diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/__init__.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/__init__.py index e51581438276..31b014576c56 100644 --- a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/__init__.py +++ 
b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/__init__.py @@ -33,6 +33,10 @@ DataAccessControlServiceAsyncClient, DataAccessControlServiceClient, ) +from .services.data_table_service import ( + DataTableServiceAsyncClient, + DataTableServiceClient, +) from .services.entity_service import EntityServiceAsyncClient, EntityServiceClient from .services.instance_service import InstanceServiceAsyncClient, InstanceServiceClient from .services.reference_list_service import ( @@ -58,6 +62,35 @@ UpdateDataAccessLabelRequest, UpdateDataAccessScopeRequest, ) +from .types.data_table import ( + BulkCreateDataTableRowsRequest, + BulkCreateDataTableRowsResponse, + BulkGetDataTableRowsRequest, + BulkGetDataTableRowsResponse, + BulkReplaceDataTableRowsRequest, + BulkReplaceDataTableRowsResponse, + BulkUpdateDataTableRowsRequest, + BulkUpdateDataTableRowsResponse, + CreateDataTableRequest, + CreateDataTableRowRequest, + DataTable, + DataTableColumnInfo, + DataTableOperationErrors, + DataTableRow, + DataTableScopeInfo, + DataTableUpdateSource, + DeleteDataTableRequest, + DeleteDataTableRowRequest, + GetDataTableOperationErrorsRequest, + GetDataTableRequest, + GetDataTableRowRequest, + ListDataTableRowsRequest, + ListDataTableRowsResponse, + ListDataTablesRequest, + ListDataTablesResponse, + UpdateDataTableRequest, + UpdateDataTableRowRequest, +) from .types.entity import ( CreateWatchlistRequest, DeleteWatchlistRequest, @@ -208,14 +241,25 @@ def _get_version(dependency_name): __all__ = ( "DataAccessControlServiceAsyncClient", + "DataTableServiceAsyncClient", "EntityServiceAsyncClient", "InstanceServiceAsyncClient", "ReferenceListServiceAsyncClient", "RuleServiceAsyncClient", + "BulkCreateDataTableRowsRequest", + "BulkCreateDataTableRowsResponse", + "BulkGetDataTableRowsRequest", + "BulkGetDataTableRowsResponse", + "BulkReplaceDataTableRowsRequest", + "BulkReplaceDataTableRowsResponse", + "BulkUpdateDataTableRowsRequest", + "BulkUpdateDataTableRowsResponse", 
"CompilationDiagnostic", "CompilationPosition", "CreateDataAccessLabelRequest", "CreateDataAccessScopeRequest", + "CreateDataTableRequest", + "CreateDataTableRowRequest", "CreateReferenceListRequest", "CreateRetrohuntRequest", "CreateRuleRequest", @@ -224,13 +268,25 @@ def _get_version(dependency_name): "DataAccessLabel", "DataAccessLabelReference", "DataAccessScope", + "DataTable", + "DataTableColumnInfo", + "DataTableOperationErrors", + "DataTableRow", + "DataTableScopeInfo", + "DataTableServiceClient", + "DataTableUpdateSource", "DeleteDataAccessLabelRequest", "DeleteDataAccessScopeRequest", + "DeleteDataTableRequest", + "DeleteDataTableRowRequest", "DeleteRuleRequest", "DeleteWatchlistRequest", "EntityServiceClient", "GetDataAccessLabelRequest", "GetDataAccessScopeRequest", + "GetDataTableOperationErrorsRequest", + "GetDataTableRequest", + "GetDataTableRowRequest", "GetInstanceRequest", "GetReferenceListRequest", "GetRetrohuntRequest", @@ -245,6 +301,10 @@ def _get_version(dependency_name): "ListDataAccessLabelsResponse", "ListDataAccessScopesRequest", "ListDataAccessScopesResponse", + "ListDataTableRowsRequest", + "ListDataTableRowsResponse", + "ListDataTablesRequest", + "ListDataTablesResponse", "ListReferenceListsRequest", "ListReferenceListsResponse", "ListRetrohuntsRequest", @@ -275,6 +335,8 @@ def _get_version(dependency_name): "Severity", "UpdateDataAccessLabelRequest", "UpdateDataAccessScopeRequest", + "UpdateDataTableRequest", + "UpdateDataTableRowRequest", "UpdateReferenceListRequest", "UpdateRuleDeploymentRequest", "UpdateRuleRequest", diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/gapic_metadata.json b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/gapic_metadata.json index 7c37a1bb2ac9..554628abd09e 100644 --- a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/gapic_metadata.json +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/gapic_metadata.json @@ -174,6 +174,250 @@ } } }, + 
"DataTableService": { + "clients": { + "grpc": { + "libraryClient": "DataTableServiceClient", + "rpcs": { + "BulkCreateDataTableRows": { + "methods": [ + "bulk_create_data_table_rows" + ] + }, + "BulkGetDataTableRows": { + "methods": [ + "bulk_get_data_table_rows" + ] + }, + "BulkReplaceDataTableRows": { + "methods": [ + "bulk_replace_data_table_rows" + ] + }, + "BulkUpdateDataTableRows": { + "methods": [ + "bulk_update_data_table_rows" + ] + }, + "CreateDataTable": { + "methods": [ + "create_data_table" + ] + }, + "CreateDataTableRow": { + "methods": [ + "create_data_table_row" + ] + }, + "DeleteDataTable": { + "methods": [ + "delete_data_table" + ] + }, + "DeleteDataTableRow": { + "methods": [ + "delete_data_table_row" + ] + }, + "GetDataTable": { + "methods": [ + "get_data_table" + ] + }, + "GetDataTableOperationErrors": { + "methods": [ + "get_data_table_operation_errors" + ] + }, + "GetDataTableRow": { + "methods": [ + "get_data_table_row" + ] + }, + "ListDataTableRows": { + "methods": [ + "list_data_table_rows" + ] + }, + "ListDataTables": { + "methods": [ + "list_data_tables" + ] + }, + "UpdateDataTable": { + "methods": [ + "update_data_table" + ] + }, + "UpdateDataTableRow": { + "methods": [ + "update_data_table_row" + ] + } + } + }, + "grpc-async": { + "libraryClient": "DataTableServiceAsyncClient", + "rpcs": { + "BulkCreateDataTableRows": { + "methods": [ + "bulk_create_data_table_rows" + ] + }, + "BulkGetDataTableRows": { + "methods": [ + "bulk_get_data_table_rows" + ] + }, + "BulkReplaceDataTableRows": { + "methods": [ + "bulk_replace_data_table_rows" + ] + }, + "BulkUpdateDataTableRows": { + "methods": [ + "bulk_update_data_table_rows" + ] + }, + "CreateDataTable": { + "methods": [ + "create_data_table" + ] + }, + "CreateDataTableRow": { + "methods": [ + "create_data_table_row" + ] + }, + "DeleteDataTable": { + "methods": [ + "delete_data_table" + ] + }, + "DeleteDataTableRow": { + "methods": [ + "delete_data_table_row" + ] + }, + "GetDataTable": { + 
"methods": [ + "get_data_table" + ] + }, + "GetDataTableOperationErrors": { + "methods": [ + "get_data_table_operation_errors" + ] + }, + "GetDataTableRow": { + "methods": [ + "get_data_table_row" + ] + }, + "ListDataTableRows": { + "methods": [ + "list_data_table_rows" + ] + }, + "ListDataTables": { + "methods": [ + "list_data_tables" + ] + }, + "UpdateDataTable": { + "methods": [ + "update_data_table" + ] + }, + "UpdateDataTableRow": { + "methods": [ + "update_data_table_row" + ] + } + } + }, + "rest": { + "libraryClient": "DataTableServiceClient", + "rpcs": { + "BulkCreateDataTableRows": { + "methods": [ + "bulk_create_data_table_rows" + ] + }, + "BulkGetDataTableRows": { + "methods": [ + "bulk_get_data_table_rows" + ] + }, + "BulkReplaceDataTableRows": { + "methods": [ + "bulk_replace_data_table_rows" + ] + }, + "BulkUpdateDataTableRows": { + "methods": [ + "bulk_update_data_table_rows" + ] + }, + "CreateDataTable": { + "methods": [ + "create_data_table" + ] + }, + "CreateDataTableRow": { + "methods": [ + "create_data_table_row" + ] + }, + "DeleteDataTable": { + "methods": [ + "delete_data_table" + ] + }, + "DeleteDataTableRow": { + "methods": [ + "delete_data_table_row" + ] + }, + "GetDataTable": { + "methods": [ + "get_data_table" + ] + }, + "GetDataTableOperationErrors": { + "methods": [ + "get_data_table_operation_errors" + ] + }, + "GetDataTableRow": { + "methods": [ + "get_data_table_row" + ] + }, + "ListDataTableRows": { + "methods": [ + "list_data_table_rows" + ] + }, + "ListDataTables": { + "methods": [ + "list_data_tables" + ] + }, + "UpdateDataTable": { + "methods": [ + "update_data_table" + ] + }, + "UpdateDataTableRow": { + "methods": [ + "update_data_table_row" + ] + } + } + } + } + }, "EntityService": { "clients": { "grpc": { diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/__init__.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/__init__.py new file mode 
100644 index 000000000000..5512ca40a205 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .async_client import DataTableServiceAsyncClient +from .client import DataTableServiceClient + +__all__ = ( + "DataTableServiceClient", + "DataTableServiceAsyncClient", +) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/async_client.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/async_client.py new file mode 100644 index 000000000000..ba6334e45d8a --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/async_client.py @@ -0,0 +1,2425 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +import re +import uuid +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.chronicle_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.rpc.status_pb2 as status_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.chronicle_v1.services.data_table_service import pagers +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .client import DataTableServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, DataTableServiceTransport +from .transports.grpc_asyncio import DataTableServiceGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class DataTableServiceAsyncClient: + """DataTableManager provides an interface for managing data + tables. 
+ """ + + _client: DataTableServiceClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = DataTableServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = DataTableServiceClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = DataTableServiceClient._DEFAULT_UNIVERSE + + data_access_scope_path = staticmethod(DataTableServiceClient.data_access_scope_path) + parse_data_access_scope_path = staticmethod( + DataTableServiceClient.parse_data_access_scope_path + ) + data_table_path = staticmethod(DataTableServiceClient.data_table_path) + parse_data_table_path = staticmethod(DataTableServiceClient.parse_data_table_path) + data_table_operation_errors_path = staticmethod( + DataTableServiceClient.data_table_operation_errors_path + ) + parse_data_table_operation_errors_path = staticmethod( + DataTableServiceClient.parse_data_table_operation_errors_path + ) + data_table_row_path = staticmethod(DataTableServiceClient.data_table_row_path) + parse_data_table_row_path = staticmethod( + DataTableServiceClient.parse_data_table_row_path + ) + common_billing_account_path = staticmethod( + DataTableServiceClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + DataTableServiceClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(DataTableServiceClient.common_folder_path) + parse_common_folder_path = staticmethod( + DataTableServiceClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + DataTableServiceClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + DataTableServiceClient.parse_common_organization_path + ) + common_project_path = staticmethod(DataTableServiceClient.common_project_path) + parse_common_project_path = staticmethod( + 
DataTableServiceClient.parse_common_project_path + ) + common_location_path = staticmethod(DataTableServiceClient.common_location_path) + parse_common_location_path = staticmethod( + DataTableServiceClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTableServiceAsyncClient: The constructed client. + """ + sa_info_func = ( + DataTableServiceClient.from_service_account_info.__func__ # type: ignore + ) + return sa_info_func(DataTableServiceAsyncClient, info, *args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTableServiceAsyncClient: The constructed client. + """ + sa_file_func = ( + DataTableServiceClient.from_service_account_file.__func__ # type: ignore + ) + return sa_file_func(DataTableServiceAsyncClient, filename, *args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. 
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return DataTableServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> DataTableServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataTableServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = DataTableServiceClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, DataTableServiceTransport, Callable[..., DataTableServiceTransport] + ] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data table service async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataTableServiceTransport,Callable[..., DataTableServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataTableServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = DataTableServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.chronicle_v1.DataTableServiceAsyncClient`.", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "credentialsType": None, + }, + ) + + async def create_data_table( + self, + request: 
Optional[Union[gcc_data_table.CreateDataTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_table: Optional[gcc_data_table.DataTable] = None, + data_table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Create a new data table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_create_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.CreateDataTableRequest( + parent="parent_value", + data_table=data_table, + data_table_id="data_table_id_value", + ) + + # Make the request + response = await client.create_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.CreateDataTableRequest, dict]]): + The request object. A request to create DataTable. + parent (:class:`str`): + Required. The parent resource where + this data table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table (:class:`google.cloud.chronicle_v1.types.DataTable`): + Required. The data table being + created. 
+ + This corresponds to the ``data_table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table_id (:class:`str`): + Required. The ID to use for the data + table. This is also the display name for + the data table. It must satisfy the + following requirements: + + - Starts with letter. + - Contains only letters, numbers and + underscore. + - Must be unique and has length < 256. + + This corresponds to the ``data_table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_table, data_table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcc_data_table.CreateDataTableRequest): + request = gcc_data_table.CreateDataTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if data_table is not None: + request.data_table = data_table + if data_table_id is not None: + request.data_table_id = data_table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_table + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_data_tables( + self, + request: Optional[Union[data_table.ListDataTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTablesAsyncPager: + r"""List data tables. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_list_data_tables(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_tables(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.ListDataTablesRequest, dict]]): + The request object. A request for a list of data tables. + parent (:class:`str`): + Required. The parent resource where + this data table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTablesAsyncPager: + Response message for listing data + tables. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.ListDataTablesRequest): + request = data_table.ListDataTablesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_tables + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataTablesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_table( + self, + request: Optional[Union[data_table.GetDataTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTable: + r"""Get data table info. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_get_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.GetDataTableRequest, dict]]): + The request object. A request to get details about a data + table. + name (:class:`str`): + Required. The resource name of the data table to + retrieve. Format: + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableRequest): + request = data_table.GetDataTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_table + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_data_table( + self, + request: Optional[Union[gcc_data_table.UpdateDataTableRequest, dict]] = None, + *, + data_table: Optional[gcc_data_table.DataTable] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Update data table. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_update_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.UpdateDataTableRequest( + data_table=data_table, + ) + + # Make the request + response = await client.update_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.UpdateDataTableRequest, dict]]): + The request object. A request to update details of data + table. + data_table (:class:`google.cloud.chronicle_v1.types.DataTable`): + Required. This field is used to identify the datatable + to update. Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``data_table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of metadata fields to update. + Currently data tables only support updating the + ``description``, ``row_time_to_live`` and ``scope_info`` + fields. When no field mask is supplied, all non-empty + fields will be updated. A field mask of "\*" will update + all fields, whether empty or not. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcc_data_table.UpdateDataTableRequest): + request = gcc_data_table.UpdateDataTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_table is not None: + request.data_table = data_table + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_table + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_table.name", request.data_table.name),) + ), + ) + + # Validate the universe domain. 
+ self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_data_table( + self, + request: Optional[Union[data_table.DeleteDataTableRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete data table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_delete_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_table(request=request) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.DeleteDataTableRequest, dict]]): + The request object. Request message for deleting data + tables. + name (:class:`str`): + Required. The resource name of the data table to delete. + Format + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (:class:`bool`): + Optional. If set to true, any rows + under this data table will also be + deleted. 
(Otherwise, the request will + only work if the data table has no + rows.) + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name, force] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.DeleteDataTableRequest): + request = data_table.DeleteDataTableRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_table + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_data_table_row( + self, + request: Optional[Union[data_table.CreateDataTableRowRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_table_row: Optional[data_table.DataTableRow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Create a new data table row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_create_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.CreateDataTableRowRequest( + parent="parent_value", + data_table_row=data_table_row, + ) + + # Make the request + response = await client.create_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.CreateDataTableRowRequest, dict]]): + The request object. Request to create data table row. + parent (:class:`str`): + Required. The resource id of the data table. 
Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table_row (:class:`google.cloud.chronicle_v1.types.DataTableRow`): + Required. The data table row to + create. + + This corresponds to the ``data_table_row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_table_row] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.CreateDataTableRowRequest): + request = data_table.CreateDataTableRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if data_table_row is not None: + request.data_table_row = data_table_row + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_data_table_row + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_data_table_row( + self, + request: Optional[Union[data_table.UpdateDataTableRowRequest, dict]] = None, + *, + data_table_row: Optional[data_table.DataTableRow] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Update data table row + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_update_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.UpdateDataTableRowRequest( + data_table_row=data_table_row, + ) + + # Make the request + response = await client.update_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest, dict]]): + The request object. Request to update data table row. + data_table_row (:class:`google.cloud.chronicle_v1.types.DataTableRow`): + Required. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``data_table_row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. The list of fields to update. Currently data + table rows only support updating the ``values`` field. + When no field mask is supplied, all non-empty fields + will be updated. A field mask of "\*" will update all + fields, whether empty or not. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_table_row, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.UpdateDataTableRowRequest): + request = data_table.UpdateDataTableRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_table_row is not None: + request.data_table_row = data_table_row + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_data_table_row + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_table_row.name", request.data_table_row.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_data_table_rows( + self, + request: Optional[Union[data_table.ListDataTableRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTableRowsAsyncPager: + r"""List data table rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_list_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTableRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_table_rows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.ListDataTableRowsRequest, dict]]): + The request object. Request to list data table rows. + parent (:class:`str`): + Required. The resource id of the data table. Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTableRowsAsyncPager: + Response message for listing data + table rows. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.ListDataTableRowsRequest): + request = data_table.ListDataTableRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDataTableRowsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_data_table_row( + self, + request: Optional[Union[data_table.GetDataTableRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Get data table row + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_get_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.GetDataTableRowRequest, dict]]): + The request object. Request to get data table row. + name (:class:`str`): + Required. The resource name of the data table row i,e + row_id. 
Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableRowRequest): + request = data_table.GetDataTableRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_table_row + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_data_table_row( + self, + request: Optional[Union[data_table.DeleteDataTableRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete data table row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_delete_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRowRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_table_row(request=request) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.DeleteDataTableRowRequest, dict]]): + The request object. Request to delete data table row. + name (:class:`str`): + Required. The resource name of the data table row i,e + row_id. 
Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.DeleteDataTableRowRequest): + request = data_table.DeleteDataTableRowRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_data_table_row + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def bulk_create_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkCreateDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.CreateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkCreateDataTableRowsResponse: + r"""Create data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_bulk_create_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkCreateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_create_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.BulkCreateDataTableRowsRequest, dict]]): + The request object. 
Request to create data table rows in + bulk. + parent (:class:`str`): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]`): + Required. Data table rows to create. + A maximum of 1000 rows (for sync + requests) or 2000 rows (for async + requests) can be created in a single + request. Total size of the rows should + be less than 4MB. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkCreateDataTableRowsResponse: + Response message with created data + table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, data_table.BulkCreateDataTableRowsRequest): + request = data_table.BulkCreateDataTableRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.bulk_create_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def bulk_get_data_table_rows( + self, + request: Optional[Union[data_table.BulkGetDataTableRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[data_table.GetDataTableRowRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkGetDataTableRowsResponse: + r"""Get data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_bulk_get_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.GetDataTableRowRequest() + requests.name = "name_value" + + request = chronicle_v1.BulkGetDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_get_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.BulkGetDataTableRowsRequest, dict]]): + The request object. Request to get data table rows in + bulk. + parent (:class:`str`): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.chronicle_v1.types.GetDataTableRowRequest]`): + Required. Data table rows to get. At + max 1,000 rows can be there in a + request. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + google.cloud.chronicle_v1.types.BulkGetDataTableRowsResponse: + Response message with data table + rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkGetDataTableRowsRequest): + request = data_table.BulkGetDataTableRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.bulk_get_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def bulk_replace_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkReplaceDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.CreateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkReplaceDataTableRowsResponse: + r"""Replace all existing data table rows with new data + table rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_bulk_replace_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkReplaceDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_replace_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsRequest, dict]]): + The request object. Request to replace data table rows in + bulk. + parent (:class:`str`): + Required. The resource id of the data table. 
Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]`): + Required. Data table rows to replace + the existing data table rows. A maximum + of 1000 rows (for sync requests) or 2000 + rows (for async requests) can be + replaced in a single request. Total size + of the rows should be less than 4MB. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsResponse: + Response message with data table rows + that replaced existing data table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, data_table.BulkReplaceDataTableRowsRequest): + request = data_table.BulkReplaceDataTableRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.bulk_replace_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def bulk_update_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkUpdateDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.UpdateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkUpdateDataTableRowsResponse: + r"""Update data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_bulk_update_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.UpdateDataTableRowRequest() + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkUpdateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_update_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsRequest, dict]]): + The request object. Request to update data table rows in + bulk. + parent (:class:`str`): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (:class:`MutableSequence[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest]`): + Required. Data table rows to update. + At max 1,000 rows (or rows with size + less than 2MB) can be there in a + request. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsResponse: + Response message with updated data + table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkUpdateDataTableRowsRequest): + request = data_table.BulkUpdateDataTableRowsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests: + request.requests.extend(requests) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.bulk_update_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def get_data_table_operation_errors( + self, + request: Optional[ + Union[data_table.GetDataTableOperationErrorsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableOperationErrors: + r"""Get the error for a data table operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + async def sample_get_data_table_operation_errors(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table_operation_errors(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.chronicle_v1.types.GetDataTableOperationErrorsRequest, dict]]): + The request object. The request message for + GetDataTableOperationErrors. + name (:class:`str`): + Required. Resource name for the data table operation + errors. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableOperationErrors: + The message containing the errors for + a data table operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableOperationErrorsRequest): + request = data_table.GetDataTableOperationErrorsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_data_table_operation_errors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_operations( + self, + request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.ListOperationsRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.ListOperationsRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_operation( + self, + request: Optional[Union[operations_pb2.DeleteOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. 
+ if request is None: + request_pb = operations_pb2.DeleteOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.DeleteOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def cancel_operation( + self, + request: Optional[Union[operations_pb2.CancelOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. Request message for + `CancelOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. 
+ # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.CancelOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.CancelOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self) -> "DataTableServiceAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("DataTableServiceAsyncClient",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/client.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/client.py new file mode 100644 index 000000000000..a8afc702de5e --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/client.py @@ -0,0 +1,2899 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json +import logging as std_logging +import os +import re +import uuid +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.chronicle_v1 import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.rpc.status_pb2 as status_pb2 # 
type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.chronicle_v1.services.data_table_service import pagers +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .transports.base import DEFAULT_CLIENT_INFO, DataTableServiceTransport +from .transports.grpc import DataTableServiceGrpcTransport +from .transports.grpc_asyncio import DataTableServiceGrpcAsyncIOTransport +from .transports.rest import DataTableServiceRestTransport + + +class DataTableServiceClientMeta(type): + """Metaclass for the DataTableService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[DataTableServiceTransport]] + _transport_registry["grpc"] = DataTableServiceGrpcTransport + _transport_registry["grpc_asyncio"] = DataTableServiceGrpcAsyncIOTransport + _transport_registry["rest"] = DataTableServiceRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[DataTableServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class DataTableServiceClient(metaclass=DataTableServiceClientMeta): + """DataTableManager provides an interface for managing data + tables. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. 
+ + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "chronicle.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "chronicle.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTableServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + DataTableServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> DataTableServiceTransport: + """Returns the transport used by the client instance. + + Returns: + DataTableServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def data_access_scope_path( + project: str, + location: str, + instance: str, + data_access_scope: str, + ) -> str: + """Returns a fully-qualified data_access_scope string.""" + return "projects/{project}/locations/{location}/instances/{instance}/dataAccessScopes/{data_access_scope}".format( + project=project, + location=location, + instance=instance, + data_access_scope=data_access_scope, + ) + + @staticmethod + def parse_data_access_scope_path(path: str) -> Dict[str, str]: + """Parses a data_access_scope path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/dataAccessScopes/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_table_path( + project: str, + location: str, + instance: str, + data_table: str, + ) -> str: + """Returns a fully-qualified data_table string.""" + return "projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}".format( + project=project, + location=location, + instance=instance, + data_table=data_table, + ) + + @staticmethod + def parse_data_table_path(path: str) -> Dict[str, str]: + """Parses a data_table path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/dataTables/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_table_operation_errors_path( + project: str, + location: str, + instance: str, + data_table_operation_errors: str, + ) -> str: + """Returns a fully-qualified data_table_operation_errors string.""" + return "projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors}".format( + project=project, + location=location, + instance=instance, + data_table_operation_errors=data_table_operation_errors, + ) + + @staticmethod + def parse_data_table_operation_errors_path(path: str) -> Dict[str, str]: + """Parses a 
data_table_operation_errors path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/dataTableOperationErrors/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def data_table_row_path( + project: str, + location: str, + instance: str, + data_table: str, + data_table_row: str, + ) -> str: + """Returns a fully-qualified data_table_row string.""" + return "projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row}".format( + project=project, + location=location, + instance=instance, + data_table=data_table, + data_table_row=data_table_row, + ) + + @staticmethod + def parse_data_table_row_path(path: str) -> Dict[str, str]: + """Parses a data_table_row path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/instances/(?P.+?)/dataTables/(?P.+?)/dataTableRows/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization 
string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = DataTableServiceClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = DataTableServiceClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = DataTableServiceClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = DataTableServiceClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = DataTableServiceClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[ + str, DataTableServiceTransport, Callable[..., DataTableServiceTransport] + ] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the data table service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,DataTableServiceTransport,Callable[..., DataTableServiceTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the DataTableServiceTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + DataTableServiceClient._read_environment_variables() + ) + self._client_cert_source = DataTableServiceClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = DataTableServiceClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, DataTableServiceTransport) + if transport_provided: + # transport is a DataTableServiceTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(DataTableServiceTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or DataTableServiceClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[DataTableServiceTransport], + Callable[..., DataTableServiceTransport], + ] = ( + DataTableServiceClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., DataTableServiceTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + 
credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.cloud.chronicle_v1.DataTableServiceClient`.", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "credentialsType": None, + }, + ) + + def create_data_table( + self, + request: Optional[Union[gcc_data_table.CreateDataTableRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_table: Optional[gcc_data_table.DataTable] = None, + data_table_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Create a new data table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_create_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.CreateDataTableRequest( + parent="parent_value", + data_table=data_table, + data_table_id="data_table_id_value", + ) + + # Make the request + response = client.create_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.CreateDataTableRequest, dict]): + The request object. A request to create DataTable. + parent (str): + Required. The parent resource where + this data table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table (google.cloud.chronicle_v1.types.DataTable): + Required. The data table being + created. + + This corresponds to the ``data_table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table_id (str): + Required. The ID to use for the data + table. This is also the display name for + the data table. It must satisfy the + following requirements: + + - Starts with letter. + - Contains only letters, numbers and + underscore. + - Must be unique and has length < 256. + + This corresponds to the ``data_table_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_table, data_table_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, gcc_data_table.CreateDataTableRequest): + request = gcc_data_table.CreateDataTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_table is not None: + request.data_table = data_table + if data_table_id is not None: + request.data_table_id = data_table_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_tables( + self, + request: Optional[Union[data_table.ListDataTablesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTablesPager: + r"""List data tables. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_list_data_tables(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.ListDataTablesRequest, dict]): + The request object. A request for a list of data tables. + parent (str): + Required. The parent resource where + this data table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTablesPager: + Response message for listing data + tables. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.ListDataTablesRequest): + request = data_table.ListDataTablesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_tables] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataTablesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_table( + self, + request: Optional[Union[data_table.GetDataTableRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTable: + r"""Get data table info. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_get_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.GetDataTableRequest, dict]): + The request object. A request to get details about a data + table. + name (str): + Required. The resource name of the data table to + retrieve. 
Format: + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableRequest): + request = data_table.GetDataTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_table] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_data_table( + self, + request: Optional[Union[gcc_data_table.UpdateDataTableRequest, dict]] = None, + *, + data_table: Optional[gcc_data_table.DataTable] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Update data table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_update_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.UpdateDataTableRequest( + data_table=data_table, + ) + + # Make the request + response = client.update_data_table(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.UpdateDataTableRequest, dict]): + The request object. A request to update details of data + table. + data_table (google.cloud.chronicle_v1.types.DataTable): + Required. This field is used to identify the datatable + to update. 
Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``data_table`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of metadata fields to update. + Currently data tables only support updating the + ``description``, ``row_time_to_live`` and ``scope_info`` + fields. When no field mask is supplied, all non-empty + fields will be updated. A field mask of "\*" will update + all fields, whether empty or not. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTable: + DataTable represents the data table + resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_table, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance(request, gcc_data_table.UpdateDataTableRequest): + request = gcc_data_table.UpdateDataTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if data_table is not None: + request.data_table = data_table + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_table.name", request.data_table.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_data_table( + self, + request: Optional[Union[data_table.DeleteDataTableRequest, dict]] = None, + *, + name: Optional[str] = None, + force: Optional[bool] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete data table. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_delete_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRequest( + name="name_value", + ) + + # Make the request + client.delete_data_table(request=request) + + Args: + request (Union[google.cloud.chronicle_v1.types.DeleteDataTableRequest, dict]): + The request object. Request message for deleting data + tables. + name (str): + Required. The resource name of the data table to delete. + Format + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + force (bool): + Optional. If set to true, any rows + under this data table will also be + deleted. (Otherwise, the request will + only work if the data table has no + rows.) + + This corresponds to the ``force`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name, force] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.DeleteDataTableRequest): + request = data_table.DeleteDataTableRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + if force is not None: + request.force = force + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_table] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_data_table_row( + self, + request: Optional[Union[data_table.CreateDataTableRowRequest, dict]] = None, + *, + parent: Optional[str] = None, + data_table_row: Optional[data_table.DataTableRow] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Create a new data table row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_create_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.CreateDataTableRowRequest( + parent="parent_value", + data_table_row=data_table_row, + ) + + # Make the request + response = client.create_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.CreateDataTableRowRequest, dict]): + The request object. Request to create data table row. + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + data_table_row (google.cloud.chronicle_v1.types.DataTableRow): + Required. The data table row to + create. + + This corresponds to the ``data_table_row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, data_table_row] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.CreateDataTableRowRequest): + request = data_table.CreateDataTableRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if data_table_row is not None: + request.data_table_row = data_table_row + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_data_table_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def update_data_table_row( + self, + request: Optional[Union[data_table.UpdateDataTableRowRequest, dict]] = None, + *, + data_table_row: Optional[data_table.DataTableRow] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Update data table row + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_update_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.UpdateDataTableRowRequest( + data_table_row=data_table_row, + ) + + # Make the request + response = client.update_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest, dict]): + The request object. Request to update data table row. + data_table_row (google.cloud.chronicle_v1.types.DataTableRow): + Required. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``data_table_row`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. 
The list of fields to update. Currently data + table rows only support updating the ``values`` field. + When no field mask is supplied, all non-empty fields + will be updated. A field mask of "\*" will update all + fields, whether empty or not. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [data_table_row, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.UpdateDataTableRowRequest): + request = data_table.UpdateDataTableRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if data_table_row is not None: + request.data_table_row = data_table_row + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_data_table_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("data_table_row.name", request.data_table_row.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_data_table_rows( + self, + request: Optional[Union[data_table.ListDataTableRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDataTableRowsPager: + r"""List data table rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_list_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTableRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_table_rows(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.ListDataTableRowsRequest, dict]): + The request object. Request to list data table rows. + parent (str): + Required. The resource id of the data table. Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTableRowsPager: + Response message for listing data + table rows. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.ListDataTableRowsRequest): + request = data_table.ListDataTableRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_data_table_rows] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDataTableRowsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_table_row( + self, + request: Optional[Union[data_table.GetDataTableRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Get data table row + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_get_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRowRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table_row(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.GetDataTableRowRequest, dict]): + The request object. Request to get data table row. + name (str): + Required. The resource name of the data table row i,e + row_id. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableRow: + DataTableRow represents a single row + in a data table. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableRowRequest): + request = data_table.GetDataTableRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_data_table_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_data_table_row( + self, + request: Optional[Union[data_table.DeleteDataTableRowRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Delete data table row. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_delete_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRowRequest( + name="name_value", + ) + + # Make the request + client.delete_data_table_row(request=request) + + Args: + request (Union[google.cloud.chronicle_v1.types.DeleteDataTableRowRequest, dict]): + The request object. Request to delete data table row. + name (str): + Required. The resource name of the data table row i,e + row_id. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.DeleteDataTableRowRequest): + request = data_table.DeleteDataTableRowRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_data_table_row] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def bulk_create_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkCreateDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.CreateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkCreateDataTableRowsResponse: + r"""Create data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_bulk_create_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkCreateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_create_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.BulkCreateDataTableRowsRequest, dict]): + The request object. Request to create data table rows in + bulk. + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]): + Required. Data table rows to create. + A maximum of 1000 rows (for sync + requests) or 2000 rows (for async + requests) can be created in a single + request. Total size of the rows should + be less than 4MB. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkCreateDataTableRowsResponse: + Response message with created data + table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkCreateDataTableRowsRequest): + request = data_table.BulkCreateDataTableRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.bulk_create_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def bulk_get_data_table_rows( + self, + request: Optional[Union[data_table.BulkGetDataTableRowsRequest, dict]] = None, + *, + parent: Optional[str] = None, + requests: Optional[MutableSequence[data_table.GetDataTableRowRequest]] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkGetDataTableRowsResponse: + r"""Get data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_bulk_get_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.GetDataTableRowRequest() + requests.name = "name_value" + + request = chronicle_v1.BulkGetDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_get_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.BulkGetDataTableRowsRequest, dict]): + The request object. Request to get data table rows in + bulk. + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.cloud.chronicle_v1.types.GetDataTableRowRequest]): + Required. 
Data table rows to get. At + max 1,000 rows can be there in a + request. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkGetDataTableRowsResponse: + Response message with data table + rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkGetDataTableRowsRequest): + request = data_table.BulkGetDataTableRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.bulk_get_data_table_rows] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def bulk_replace_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkReplaceDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.CreateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkReplaceDataTableRowsResponse: + r"""Replace all existing data table rows with new data + table rows. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_bulk_replace_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkReplaceDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_replace_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsRequest, dict]): + The request object. Request to replace data table rows in + bulk. + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + requests (MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]): + Required. Data table rows to replace + the existing data table rows. A maximum + of 1000 rows (for sync requests) or 2000 + rows (for async requests) can be + replaced in a single request. Total size + of the rows should be less than 4MB. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsResponse: + Response message with data table rows + that replaced existing data table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkReplaceDataTableRowsRequest): + request = data_table.BulkReplaceDataTableRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.bulk_replace_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def bulk_update_data_table_rows( + self, + request: Optional[ + Union[data_table.BulkUpdateDataTableRowsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + requests: Optional[ + MutableSequence[data_table.UpdateDataTableRowRequest] + ] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkUpdateDataTableRowsResponse: + r"""Update data table rows in bulk. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_bulk_update_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.UpdateDataTableRowRequest() + requests.data_table_row.values = ['values_value1', 'values_value2'] + + request = chronicle_v1.BulkUpdateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_update_data_table_rows(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsRequest, dict]): + The request object. Request to update data table rows in + bulk. + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ requests (MutableSequence[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest]): + Required. Data table rows to update. + At max 1,000 rows (or rows with size + less than 2MB) can be there in a + request. + + This corresponds to the ``requests`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsResponse: + Response message with updated data + table rows. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, requests] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.BulkUpdateDataTableRowsRequest): + request = data_table.BulkUpdateDataTableRowsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if requests is not None: + request.requests = requests + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.bulk_update_data_table_rows + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_data_table_operation_errors( + self, + request: Optional[ + Union[data_table.GetDataTableOperationErrorsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableOperationErrors: + r"""Get the error for a data table operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import chronicle_v1 + + def sample_get_data_table_operation_errors(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table_operation_errors(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.chronicle_v1.types.GetDataTableOperationErrorsRequest, dict]): + The request object. The request message for + GetDataTableOperationErrors. 
+ name (str): + Required. Resource name for the data table operation + errors. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.chronicle_v1.types.DataTableOperationErrors: + The message containing the errors for + a data table operation. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, data_table.GetDataTableOperationErrorsRequest): + request = data_table.GetDataTableOperationErrorsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.get_data_table_operation_errors + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def __enter__(self) -> "DataTableServiceClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def list_operations( + self, + request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + r"""Lists operations that match the specified filter in the request. + + Args: + request (:class:`~.operations_pb2.ListOperationsRequest`): + The request object. Request message for + `ListOperations` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.ListOperationsResponse: + Response message for ``ListOperations`` method. 
+ """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.ListOperationsRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.ListOperationsRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_operations] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def get_operation( + self, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + def delete_operation( + self, + request: Optional[Union[operations_pb2.DeleteOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Deletes a long-running operation. + + This method indicates that the client is no longer interested + in the operation result. It does not cancel the operation. + If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.DeleteOperationRequest`): + The request object. Request message for + `DeleteOperation` method. 
+ retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.DeleteOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.DeleteOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def cancel_operation( + self, + request: Optional[Union[operations_pb2.CancelOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Starts asynchronous cancellation on a long-running operation. + + The server makes a best effort to cancel the operation, but success + is not guaranteed. If the server doesn't support this method, it returns + `google.rpc.Code.UNIMPLEMENTED`. + + Args: + request (:class:`~.operations_pb2.CancelOperationRequest`): + The request object. 
Request message for + `CancelOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + None + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.CancelOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.CancelOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.cancel_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("DataTableServiceClient",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/pagers.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/pagers.py new file mode 100644 index 000000000000..625e847a8fae --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/pagers.py @@ -0,0 +1,353 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.chronicle_v1.types import data_table + + +class ListDataTablesPager: + """A pager for iterating through ``list_data_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.chronicle_v1.types.ListDataTablesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_tables`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataTables`` requests and continue to iterate + through the ``data_tables`` field on the + corresponding responses. + + All the usual :class:`google.cloud.chronicle_v1.types.ListDataTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., data_table.ListDataTablesResponse], + request: data_table.ListDataTablesRequest, + response: data_table.ListDataTablesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.chronicle_v1.types.ListDataTablesRequest): + The initial request object. + response (google.cloud.chronicle_v1.types.ListDataTablesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = data_table.ListDataTablesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_table.ListDataTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[data_table.DataTable]: + for page in self.pages: + yield from page.data_tables + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataTablesAsyncPager: + """A pager for iterating through ``list_data_tables`` requests. + + This class thinly wraps an initial + :class:`google.cloud.chronicle_v1.types.ListDataTablesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_tables`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataTables`` requests and continue to iterate + through the ``data_tables`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.chronicle_v1.types.ListDataTablesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., Awaitable[data_table.ListDataTablesResponse]], + request: data_table.ListDataTablesRequest, + response: data_table.ListDataTablesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.chronicle_v1.types.ListDataTablesRequest): + The initial request object. + response (google.cloud.chronicle_v1.types.ListDataTablesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = data_table.ListDataTablesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_table.ListDataTablesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[data_table.DataTable]: + async def async_generator(): + async for page in self.pages: + for response in page.data_tables: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataTableRowsPager: + """A pager for iterating through ``list_data_table_rows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.chronicle_v1.types.ListDataTableRowsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``data_table_rows`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDataTableRows`` requests and continue to iterate + through the ``data_table_rows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.chronicle_v1.types.ListDataTableRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., data_table.ListDataTableRowsResponse], + request: data_table.ListDataTableRowsRequest, + response: data_table.ListDataTableRowsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.chronicle_v1.types.ListDataTableRowsRequest): + The initial request object. + response (google.cloud.chronicle_v1.types.ListDataTableRowsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = data_table.ListDataTableRowsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[data_table.ListDataTableRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[data_table.DataTableRow]: + for page in self.pages: + yield from page.data_table_rows + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDataTableRowsAsyncPager: + """A pager for iterating through ``list_data_table_rows`` requests. + + This class thinly wraps an initial + :class:`google.cloud.chronicle_v1.types.ListDataTableRowsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``data_table_rows`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDataTableRows`` requests and continue to iterate + through the ``data_table_rows`` field on the + corresponding responses. + + All the usual :class:`google.cloud.chronicle_v1.types.ListDataTableRowsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[data_table.ListDataTableRowsResponse]], + request: data_table.ListDataTableRowsRequest, + response: data_table.ListDataTableRowsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.chronicle_v1.types.ListDataTableRowsRequest): + The initial request object. + response (google.cloud.chronicle_v1.types.ListDataTableRowsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = data_table.ListDataTableRowsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[data_table.ListDataTableRowsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[data_table.DataTableRow]: + async def async_generator(): + async for page in self.pages: + for response in page.data_table_rows: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/README.rst b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/README.rst new file mode 100644 index 000000000000..954ad3e80afc --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``DataTableServiceTransport`` is the ABC for all transports. + +- public child ``DataTableServiceGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``DataTableServiceGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseDataTableServiceRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). 
+- public child ``DataTableServiceRestTransport`` for sync REST transport with inner classes ``METHOD`` derived from the parent's corresponding ``_BaseMETHOD`` classes (defined in ``rest.py``). diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/__init__.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/__init__.py new file mode 100644 index 000000000000..883ac0bfcfc1 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/__init__.py @@ -0,0 +1,36 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import DataTableServiceTransport +from .grpc import DataTableServiceGrpcTransport +from .grpc_asyncio import DataTableServiceGrpcAsyncIOTransport +from .rest import DataTableServiceRestInterceptor, DataTableServiceRestTransport + +# Compile a registry of transports. 
+_transport_registry = OrderedDict() # type: Dict[str, Type[DataTableServiceTransport]] +_transport_registry["grpc"] = DataTableServiceGrpcTransport +_transport_registry["grpc_asyncio"] = DataTableServiceGrpcAsyncIOTransport +_transport_registry["rest"] = DataTableServiceRestTransport + +__all__ = ( + "DataTableServiceTransport", + "DataTableServiceGrpcTransport", + "DataTableServiceGrpcAsyncIOTransport", + "DataTableServiceRestTransport", + "DataTableServiceRestInterceptor", +) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/base.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/base.py new file mode 100644 index 000000000000..61f8c65380d2 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/base.py @@ -0,0 +1,512 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +import google.api_core +import google.auth # type: ignore +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.chronicle_v1 import gapic_version as package_version +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataTableServiceTransport(abc.ABC): + """Abstract transport class for DataTableService.""" + + AUTH_SCOPES = ( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ) + + DEFAULT_HOST: str = "chronicle.googleapis.com" + + def __init__( + self, + *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'chronicle.googleapis.com'). 
+ credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + + # Save the scopes. + self._scopes = scopes + if not hasattr(self, "_ignore_credentials"): + self._ignore_credentials: bool = False + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs( + "'credentials_file' and 'credentials' are mutually exclusive" + ) + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + elif credentials is None and not self._ignore_credentials: + credentials, _ = google.auth.default( + scopes=scopes, + quota_project_id=quota_project_id, + default_scopes=self.AUTH_SCOPES, + ) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience( + api_audience if api_audience else host + ) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if ( + always_use_jwt_access + and isinstance(credentials, service_account.Credentials) + and hasattr(service_account.Credentials, "with_always_use_jwt_access") + ): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ":" not in host: + host += ":443" + self._host = host + + self._wrapped_methods: Dict[Callable, Callable] = {} + + @property + def host(self): + return self._host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_data_table: gapic_v1.method.wrap_method( + self.create_data_table, + default_timeout=600.0, + client_info=client_info, + ), + self.list_data_tables: gapic_v1.method.wrap_method( + self.list_data_tables, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table: gapic_v1.method.wrap_method( + self.get_data_table, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update_data_table: gapic_v1.method.wrap_method( + self.update_data_table, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_data_table: gapic_v1.method.wrap_method( + self.delete_data_table, + default_timeout=600.0, + client_info=client_info, + ), + self.create_data_table_row: gapic_v1.method.wrap_method( + self.create_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.update_data_table_row: gapic_v1.method.wrap_method( + self.update_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.list_data_table_rows: gapic_v1.method.wrap_method( + self.list_data_table_rows, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table_row: gapic_v1.method.wrap_method( + self.get_data_table_row, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + 
client_info=client_info, + ), + self.delete_data_table_row: gapic_v1.method.wrap_method( + self.delete_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_create_data_table_rows: gapic_v1.method.wrap_method( + self.bulk_create_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_get_data_table_rows: gapic_v1.method.wrap_method( + self.bulk_get_data_table_rows, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_replace_data_table_rows: gapic_v1.method.wrap_method( + self.bulk_replace_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_update_data_table_rows: gapic_v1.method.wrap_method( + self.bulk_update_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table_operation_errors: gapic_v1.method.wrap_method( + self.get_data_table_operation_errors, + default_retry=retries.Retry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.cancel_operation: gapic_v1.method.wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: gapic_v1.method.wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: gapic_v1.method.wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: gapic_v1.method.wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. 
warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! + """ + raise NotImplementedError() + + @property + def create_data_table( + self, + ) -> Callable[ + [gcc_data_table.CreateDataTableRequest], + Union[gcc_data_table.DataTable, Awaitable[gcc_data_table.DataTable]], + ]: + raise NotImplementedError() + + @property + def list_data_tables( + self, + ) -> Callable[ + [data_table.ListDataTablesRequest], + Union[ + data_table.ListDataTablesResponse, + Awaitable[data_table.ListDataTablesResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_table( + self, + ) -> Callable[ + [data_table.GetDataTableRequest], + Union[data_table.DataTable, Awaitable[data_table.DataTable]], + ]: + raise NotImplementedError() + + @property + def update_data_table( + self, + ) -> Callable[ + [gcc_data_table.UpdateDataTableRequest], + Union[gcc_data_table.DataTable, Awaitable[gcc_data_table.DataTable]], + ]: + raise NotImplementedError() + + @property + def delete_data_table( + self, + ) -> Callable[ + [data_table.DeleteDataTableRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def create_data_table_row( + self, + ) -> Callable[ + [data_table.CreateDataTableRowRequest], + Union[data_table.DataTableRow, Awaitable[data_table.DataTableRow]], + ]: + raise NotImplementedError() + + @property + def update_data_table_row( + self, + ) -> Callable[ + [data_table.UpdateDataTableRowRequest], + Union[data_table.DataTableRow, Awaitable[data_table.DataTableRow]], + ]: + raise NotImplementedError() + + @property + def list_data_table_rows( + self, + ) -> Callable[ + [data_table.ListDataTableRowsRequest], + Union[ + data_table.ListDataTableRowsResponse, + Awaitable[data_table.ListDataTableRowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_table_row( + self, + ) -> Callable[ + [data_table.GetDataTableRowRequest], + 
Union[data_table.DataTableRow, Awaitable[data_table.DataTableRow]], + ]: + raise NotImplementedError() + + @property + def delete_data_table_row( + self, + ) -> Callable[ + [data_table.DeleteDataTableRowRequest], + Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], + ]: + raise NotImplementedError() + + @property + def bulk_create_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkCreateDataTableRowsRequest], + Union[ + data_table.BulkCreateDataTableRowsResponse, + Awaitable[data_table.BulkCreateDataTableRowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def bulk_get_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkGetDataTableRowsRequest], + Union[ + data_table.BulkGetDataTableRowsResponse, + Awaitable[data_table.BulkGetDataTableRowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def bulk_replace_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkReplaceDataTableRowsRequest], + Union[ + data_table.BulkReplaceDataTableRowsResponse, + Awaitable[data_table.BulkReplaceDataTableRowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def bulk_update_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkUpdateDataTableRowsRequest], + Union[ + data_table.BulkUpdateDataTableRowsResponse, + Awaitable[data_table.BulkUpdateDataTableRowsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_data_table_operation_errors( + self, + ) -> Callable[ + [data_table.GetDataTableOperationErrorsRequest], + Union[ + data_table.DataTableOperationErrors, + Awaitable[data_table.DataTableOperationErrors], + ], + ]: + raise NotImplementedError() + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], + Union[ + operations_pb2.ListOperationsResponse, + Awaitable[operations_pb2.ListOperationsResponse], + ], + ]: + raise NotImplementedError() + + @property + def get_operation( + self, + ) -> Callable[ + [operations_pb2.GetOperationRequest], 
+ Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def cancel_operation( + self, + ) -> Callable[ + [operations_pb2.CancelOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def delete_operation( + self, + ) -> Callable[ + [operations_pb2.DeleteOperationRequest], + None, + ]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ("DataTableServiceTransport",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc.py new file mode 100644 index 000000000000..f1020b790b6b --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc.py @@ -0,0 +1,829 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import pickle +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +import google.auth # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore +from google.api_core import gapic_v1, grpc_helpers +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson + +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .base import DEFAULT_CLIENT_INFO, DataTableServiceTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER + def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" + + request_metadata = { + key: value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + 
f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = response.result() + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response for {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": client_call_details.method, + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DataTableServiceGrpcTransport(DataTableServiceTransport): + """gRPC backend transport for DataTableService. + + DataTableManager provides an interface for managing data + tables. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "chronicle.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'chronicle.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the 
credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "chronicle.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. 
+ + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def create_data_table( + self, + ) -> Callable[[gcc_data_table.CreateDataTableRequest], gcc_data_table.DataTable]: + r"""Return a callable for the create data table method over gRPC. + + Create a new data table. + + Returns: + Callable[[~.CreateDataTableRequest], + ~.DataTable]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_table" not in self._stubs: + self._stubs["create_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/CreateDataTable", + request_serializer=gcc_data_table.CreateDataTableRequest.serialize, + response_deserializer=gcc_data_table.DataTable.deserialize, + ) + return self._stubs["create_data_table"] + + @property + def list_data_tables( + self, + ) -> Callable[ + [data_table.ListDataTablesRequest], data_table.ListDataTablesResponse + ]: + r"""Return a callable for the list data tables method over gRPC. + + List data tables. + + Returns: + Callable[[~.ListDataTablesRequest], + ~.ListDataTablesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_data_tables" not in self._stubs: + self._stubs["list_data_tables"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/ListDataTables", + request_serializer=data_table.ListDataTablesRequest.serialize, + response_deserializer=data_table.ListDataTablesResponse.deserialize, + ) + return self._stubs["list_data_tables"] + + @property + def get_data_table( + self, + ) -> Callable[[data_table.GetDataTableRequest], data_table.DataTable]: + r"""Return a callable for the get data table method over gRPC. + + Get data table info. + + Returns: + Callable[[~.GetDataTableRequest], + ~.DataTable]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_table" not in self._stubs: + self._stubs["get_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTable", + request_serializer=data_table.GetDataTableRequest.serialize, + response_deserializer=data_table.DataTable.deserialize, + ) + return self._stubs["get_data_table"] + + @property + def update_data_table( + self, + ) -> Callable[[gcc_data_table.UpdateDataTableRequest], gcc_data_table.DataTable]: + r"""Return a callable for the update data table method over gRPC. + + Update data table. + + Returns: + Callable[[~.UpdateDataTableRequest], + ~.DataTable]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_table" not in self._stubs: + self._stubs["update_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/UpdateDataTable", + request_serializer=gcc_data_table.UpdateDataTableRequest.serialize, + response_deserializer=gcc_data_table.DataTable.deserialize, + ) + return self._stubs["update_data_table"] + + @property + def delete_data_table( + self, + ) -> Callable[[data_table.DeleteDataTableRequest], empty_pb2.Empty]: + r"""Return a callable for the delete data table method over gRPC. + + Delete data table. + + Returns: + Callable[[~.DeleteDataTableRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_table" not in self._stubs: + self._stubs["delete_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/DeleteDataTable", + request_serializer=data_table.DeleteDataTableRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_table"] + + @property + def create_data_table_row( + self, + ) -> Callable[[data_table.CreateDataTableRowRequest], data_table.DataTableRow]: + r"""Return a callable for the create data table row method over gRPC. + + Create a new data table row. + + Returns: + Callable[[~.CreateDataTableRowRequest], + ~.DataTableRow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_data_table_row" not in self._stubs: + self._stubs["create_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/CreateDataTableRow", + request_serializer=data_table.CreateDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["create_data_table_row"] + + @property + def update_data_table_row( + self, + ) -> Callable[[data_table.UpdateDataTableRowRequest], data_table.DataTableRow]: + r"""Return a callable for the update data table row method over gRPC. + + Update data table row + + Returns: + Callable[[~.UpdateDataTableRowRequest], + ~.DataTableRow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_table_row" not in self._stubs: + self._stubs["update_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/UpdateDataTableRow", + request_serializer=data_table.UpdateDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["update_data_table_row"] + + @property + def list_data_table_rows( + self, + ) -> Callable[ + [data_table.ListDataTableRowsRequest], data_table.ListDataTableRowsResponse + ]: + r"""Return a callable for the list data table rows method over gRPC. + + List data table rows. + + Returns: + Callable[[~.ListDataTableRowsRequest], + ~.ListDataTableRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_table_rows" not in self._stubs: + self._stubs["list_data_table_rows"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/ListDataTableRows", + request_serializer=data_table.ListDataTableRowsRequest.serialize, + response_deserializer=data_table.ListDataTableRowsResponse.deserialize, + ) + return self._stubs["list_data_table_rows"] + + @property + def get_data_table_row( + self, + ) -> Callable[[data_table.GetDataTableRowRequest], data_table.DataTableRow]: + r"""Return a callable for the get data table row method over gRPC. + + Get data table row + + Returns: + Callable[[~.GetDataTableRowRequest], + ~.DataTableRow]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_table_row" not in self._stubs: + self._stubs["get_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTableRow", + request_serializer=data_table.GetDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["get_data_table_row"] + + @property + def delete_data_table_row( + self, + ) -> Callable[[data_table.DeleteDataTableRowRequest], empty_pb2.Empty]: + r"""Return a callable for the delete data table row method over gRPC. + + Delete data table row. + + Returns: + Callable[[~.DeleteDataTableRowRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_data_table_row" not in self._stubs: + self._stubs["delete_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/DeleteDataTableRow", + request_serializer=data_table.DeleteDataTableRowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_table_row"] + + @property + def bulk_create_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkCreateDataTableRowsRequest], + data_table.BulkCreateDataTableRowsResponse, + ]: + r"""Return a callable for the bulk create data table rows method over gRPC. + + Create data table rows in bulk. + + Returns: + Callable[[~.BulkCreateDataTableRowsRequest], + ~.BulkCreateDataTableRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_create_data_table_rows" not in self._stubs: + self._stubs["bulk_create_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkCreateDataTableRows", + request_serializer=data_table.BulkCreateDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkCreateDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_create_data_table_rows"] + + @property + def bulk_get_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkGetDataTableRowsRequest], + data_table.BulkGetDataTableRowsResponse, + ]: + r"""Return a callable for the bulk get data table rows method over gRPC. + + Get data table rows in bulk. + + Returns: + Callable[[~.BulkGetDataTableRowsRequest], + ~.BulkGetDataTableRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_get_data_table_rows" not in self._stubs: + self._stubs["bulk_get_data_table_rows"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkGetDataTableRows", + request_serializer=data_table.BulkGetDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkGetDataTableRowsResponse.deserialize, + ) + return self._stubs["bulk_get_data_table_rows"] + + @property + def bulk_replace_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkReplaceDataTableRowsRequest], + data_table.BulkReplaceDataTableRowsResponse, + ]: + r"""Return a callable for the bulk replace data table rows method over gRPC. + + Replace all existing data table rows with new data + table rows. + + Returns: + Callable[[~.BulkReplaceDataTableRowsRequest], + ~.BulkReplaceDataTableRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_replace_data_table_rows" not in self._stubs: + self._stubs["bulk_replace_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkReplaceDataTableRows", + request_serializer=data_table.BulkReplaceDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkReplaceDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_replace_data_table_rows"] + + @property + def bulk_update_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkUpdateDataTableRowsRequest], + data_table.BulkUpdateDataTableRowsResponse, + ]: + r"""Return a callable for the bulk update data table rows method over gRPC. + + Update data table rows in bulk. 
+ + Returns: + Callable[[~.BulkUpdateDataTableRowsRequest], + ~.BulkUpdateDataTableRowsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_update_data_table_rows" not in self._stubs: + self._stubs["bulk_update_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkUpdateDataTableRows", + request_serializer=data_table.BulkUpdateDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkUpdateDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_update_data_table_rows"] + + @property + def get_data_table_operation_errors( + self, + ) -> Callable[ + [data_table.GetDataTableOperationErrorsRequest], + data_table.DataTableOperationErrors, + ]: + r"""Return a callable for the get data table operation + errors method over gRPC. + + Get the error for a data table operation. + + Returns: + Callable[[~.GetDataTableOperationErrorsRequest], + ~.DataTableOperationErrors]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_data_table_operation_errors" not in self._stubs: + self._stubs["get_data_table_operation_errors"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTableOperationErrors", + request_serializer=data_table.GetDataTableOperationErrorsRequest.serialize, + response_deserializer=data_table.DataTableOperationErrors.deserialize, + ) + ) + return self._stubs["get_data_table_operation_errors"] + + def close(self): + self._logged_channel.close() + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("DataTableServiceGrpcTransport",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc_asyncio.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc_asyncio.py new file mode 100644 index 000000000000..d846c12cc903 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/grpc_asyncio.py @@ -0,0 +1,1007 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +from grpc.experimental import aio # type: ignore + +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .base import DEFAULT_CLIENT_INFO, DataTableServiceTransport +from .grpc import DataTableServiceGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" + + request_metadata = { + key: 
value.decode("utf-8") if isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class DataTableServiceGrpcAsyncIOTransport(DataTableServiceTransport): + """gRPC AsyncIO backend transport for DataTableService. + + DataTableManager provides an interface for managing data + tables. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "chronicle.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "chronicle.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'chronicle.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. 
+ If not set, the host value will be used as a default. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. 
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_data_table( + self, + ) -> Callable[ + [gcc_data_table.CreateDataTableRequest], Awaitable[gcc_data_table.DataTable] + ]: + r"""Return a callable for the create data table method over gRPC. + + Create a new data table. + + Returns: + Callable[[~.CreateDataTableRequest], + Awaitable[~.DataTable]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_data_table" not in self._stubs: + self._stubs["create_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/CreateDataTable", + request_serializer=gcc_data_table.CreateDataTableRequest.serialize, + response_deserializer=gcc_data_table.DataTable.deserialize, + ) + return self._stubs["create_data_table"] + + @property + def list_data_tables( + self, + ) -> Callable[ + [data_table.ListDataTablesRequest], Awaitable[data_table.ListDataTablesResponse] + ]: + r"""Return a callable for the list data tables method over gRPC. + + List data tables. + + Returns: + Callable[[~.ListDataTablesRequest], + Awaitable[~.ListDataTablesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_tables" not in self._stubs: + self._stubs["list_data_tables"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/ListDataTables", + request_serializer=data_table.ListDataTablesRequest.serialize, + response_deserializer=data_table.ListDataTablesResponse.deserialize, + ) + return self._stubs["list_data_tables"] + + @property + def get_data_table( + self, + ) -> Callable[[data_table.GetDataTableRequest], Awaitable[data_table.DataTable]]: + r"""Return a callable for the get data table method over gRPC. + + Get data table info. + + Returns: + Callable[[~.GetDataTableRequest], + Awaitable[~.DataTable]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_table" not in self._stubs: + self._stubs["get_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTable", + request_serializer=data_table.GetDataTableRequest.serialize, + response_deserializer=data_table.DataTable.deserialize, + ) + return self._stubs["get_data_table"] + + @property + def update_data_table( + self, + ) -> Callable[ + [gcc_data_table.UpdateDataTableRequest], Awaitable[gcc_data_table.DataTable] + ]: + r"""Return a callable for the update data table method over gRPC. + + Update data table. + + Returns: + Callable[[~.UpdateDataTableRequest], + Awaitable[~.DataTable]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_data_table" not in self._stubs: + self._stubs["update_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/UpdateDataTable", + request_serializer=gcc_data_table.UpdateDataTableRequest.serialize, + response_deserializer=gcc_data_table.DataTable.deserialize, + ) + return self._stubs["update_data_table"] + + @property + def delete_data_table( + self, + ) -> Callable[[data_table.DeleteDataTableRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete data table method over gRPC. + + Delete data table. + + Returns: + Callable[[~.DeleteDataTableRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_data_table" not in self._stubs: + self._stubs["delete_data_table"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/DeleteDataTable", + request_serializer=data_table.DeleteDataTableRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_table"] + + @property + def create_data_table_row( + self, + ) -> Callable[ + [data_table.CreateDataTableRowRequest], Awaitable[data_table.DataTableRow] + ]: + r"""Return a callable for the create data table row method over gRPC. + + Create a new data table row. + + Returns: + Callable[[~.CreateDataTableRowRequest], + Awaitable[~.DataTableRow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_data_table_row" not in self._stubs: + self._stubs["create_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/CreateDataTableRow", + request_serializer=data_table.CreateDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["create_data_table_row"] + + @property + def update_data_table_row( + self, + ) -> Callable[ + [data_table.UpdateDataTableRowRequest], Awaitable[data_table.DataTableRow] + ]: + r"""Return a callable for the update data table row method over gRPC. + + Update data table row + + Returns: + Callable[[~.UpdateDataTableRowRequest], + Awaitable[~.DataTableRow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_data_table_row" not in self._stubs: + self._stubs["update_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/UpdateDataTableRow", + request_serializer=data_table.UpdateDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["update_data_table_row"] + + @property + def list_data_table_rows( + self, + ) -> Callable[ + [data_table.ListDataTableRowsRequest], + Awaitable[data_table.ListDataTableRowsResponse], + ]: + r"""Return a callable for the list data table rows method over gRPC. + + List data table rows. + + Returns: + Callable[[~.ListDataTableRowsRequest], + Awaitable[~.ListDataTableRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_data_table_rows" not in self._stubs: + self._stubs["list_data_table_rows"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/ListDataTableRows", + request_serializer=data_table.ListDataTableRowsRequest.serialize, + response_deserializer=data_table.ListDataTableRowsResponse.deserialize, + ) + return self._stubs["list_data_table_rows"] + + @property + def get_data_table_row( + self, + ) -> Callable[ + [data_table.GetDataTableRowRequest], Awaitable[data_table.DataTableRow] + ]: + r"""Return a callable for the get data table row method over gRPC. + + Get data table row + + Returns: + Callable[[~.GetDataTableRowRequest], + Awaitable[~.DataTableRow]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_table_row" not in self._stubs: + self._stubs["get_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTableRow", + request_serializer=data_table.GetDataTableRowRequest.serialize, + response_deserializer=data_table.DataTableRow.deserialize, + ) + return self._stubs["get_data_table_row"] + + @property + def delete_data_table_row( + self, + ) -> Callable[[data_table.DeleteDataTableRowRequest], Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete data table row method over gRPC. + + Delete data table row. + + Returns: + Callable[[~.DeleteDataTableRowRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_data_table_row" not in self._stubs: + self._stubs["delete_data_table_row"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/DeleteDataTableRow", + request_serializer=data_table.DeleteDataTableRowRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs["delete_data_table_row"] + + @property + def bulk_create_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkCreateDataTableRowsRequest], + Awaitable[data_table.BulkCreateDataTableRowsResponse], + ]: + r"""Return a callable for the bulk create data table rows method over gRPC. + + Create data table rows in bulk. + + Returns: + Callable[[~.BulkCreateDataTableRowsRequest], + Awaitable[~.BulkCreateDataTableRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_create_data_table_rows" not in self._stubs: + self._stubs["bulk_create_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkCreateDataTableRows", + request_serializer=data_table.BulkCreateDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkCreateDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_create_data_table_rows"] + + @property + def bulk_get_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkGetDataTableRowsRequest], + Awaitable[data_table.BulkGetDataTableRowsResponse], + ]: + r"""Return a callable for the bulk get data table rows method over gRPC. + + Get data table rows in bulk. + + Returns: + Callable[[~.BulkGetDataTableRowsRequest], + Awaitable[~.BulkGetDataTableRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_get_data_table_rows" not in self._stubs: + self._stubs["bulk_get_data_table_rows"] = self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkGetDataTableRows", + request_serializer=data_table.BulkGetDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkGetDataTableRowsResponse.deserialize, + ) + return self._stubs["bulk_get_data_table_rows"] + + @property + def bulk_replace_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkReplaceDataTableRowsRequest], + Awaitable[data_table.BulkReplaceDataTableRowsResponse], + ]: + r"""Return a callable for the bulk replace data table rows method over gRPC. + + Replace all existing data table rows with new data + table rows. + + Returns: + Callable[[~.BulkReplaceDataTableRowsRequest], + Awaitable[~.BulkReplaceDataTableRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "bulk_replace_data_table_rows" not in self._stubs: + self._stubs["bulk_replace_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkReplaceDataTableRows", + request_serializer=data_table.BulkReplaceDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkReplaceDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_replace_data_table_rows"] + + @property + def bulk_update_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkUpdateDataTableRowsRequest], + Awaitable[data_table.BulkUpdateDataTableRowsResponse], + ]: + r"""Return a callable for the bulk update data table rows method over gRPC. + + Update data table rows in bulk. + + Returns: + Callable[[~.BulkUpdateDataTableRowsRequest], + Awaitable[~.BulkUpdateDataTableRowsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "bulk_update_data_table_rows" not in self._stubs: + self._stubs["bulk_update_data_table_rows"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/BulkUpdateDataTableRows", + request_serializer=data_table.BulkUpdateDataTableRowsRequest.serialize, + response_deserializer=data_table.BulkUpdateDataTableRowsResponse.deserialize, + ) + ) + return self._stubs["bulk_update_data_table_rows"] + + @property + def get_data_table_operation_errors( + self, + ) -> Callable[ + [data_table.GetDataTableOperationErrorsRequest], + Awaitable[data_table.DataTableOperationErrors], + ]: + r"""Return a callable for the get data table operation + errors method over gRPC. + + Get the error for a data table operation. 
+ + Returns: + Callable[[~.GetDataTableOperationErrorsRequest], + Awaitable[~.DataTableOperationErrors]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_data_table_operation_errors" not in self._stubs: + self._stubs["get_data_table_operation_errors"] = ( + self._logged_channel.unary_unary( + "/google.cloud.chronicle.v1.DataTableService/GetDataTableOperationErrors", + request_serializer=data_table.GetDataTableOperationErrorsRequest.serialize, + response_deserializer=data_table.DataTableOperationErrors.deserialize, + ) + ) + return self._stubs["get_data_table_operation_errors"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_data_table: self._wrap_method( + self.create_data_table, + default_timeout=600.0, + client_info=client_info, + ), + self.list_data_tables: self._wrap_method( + self.list_data_tables, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table: self._wrap_method( + self.get_data_table, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.update_data_table: self._wrap_method( + self.update_data_table, + default_timeout=600.0, + client_info=client_info, + ), + self.delete_data_table: self._wrap_method( + self.delete_data_table, + default_timeout=600.0, + 
client_info=client_info, + ), + self.create_data_table_row: self._wrap_method( + self.create_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.update_data_table_row: self._wrap_method( + self.update_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.list_data_table_rows: self._wrap_method( + self.list_data_table_rows, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table_row: self._wrap_method( + self.get_data_table_row, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.delete_data_table_row: self._wrap_method( + self.delete_data_table_row, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_create_data_table_rows: self._wrap_method( + self.bulk_create_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_get_data_table_rows: self._wrap_method( + self.bulk_get_data_table_rows, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_replace_data_table_rows: self._wrap_method( + self.bulk_replace_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.bulk_update_data_table_rows: self._wrap_method( + self.bulk_update_data_table_rows, + default_timeout=600.0, + client_info=client_info, + ), + self.get_data_table_operation_errors: self._wrap_method( + self.get_data_table_operation_errors, + default_retry=retries.AsyncRetry( + 
initial=1.0, + maximum=60.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=600.0, + ), + default_timeout=600.0, + client_info=client_info, + ), + self.cancel_operation: self._wrap_method( + self.cancel_operation, + default_timeout=None, + client_info=client_info, + ), + self.delete_operation: self._wrap_method( + self.delete_operation, + default_timeout=None, + client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + self.list_operations: self._wrap_method( + self.list_operations, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def delete_operation( + self, + ) -> Callable[[operations_pb2.DeleteOperationRequest], None]: + r"""Return a callable for the delete_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_operation" not in self._stubs: + self._stubs["delete_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/DeleteOperation", + request_serializer=operations_pb2.DeleteOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["delete_operation"] + + @property + def cancel_operation( + self, + ) -> Callable[[operations_pb2.CancelOperationRequest], None]: + r"""Return a callable for the cancel_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "cancel_operation" not in self._stubs: + self._stubs["cancel_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/CancelOperation", + request_serializer=operations_pb2.CancelOperationRequest.SerializeToString, + response_deserializer=None, + ) + return self._stubs["cancel_operation"] + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def list_operations( + self, + ) -> Callable[ + [operations_pb2.ListOperationsRequest], operations_pb2.ListOperationsResponse + ]: + r"""Return a callable for the list_operations method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_operations" not in self._stubs: + self._stubs["list_operations"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/ListOperations", + request_serializer=operations_pb2.ListOperationsRequest.SerializeToString, + response_deserializer=operations_pb2.ListOperationsResponse.FromString, + ) + return self._stubs["list_operations"] + + +__all__ = ("DataTableServiceGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest.py new file mode 100644 index 000000000000..306072c1f9a3 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest.py @@ -0,0 +1,3930 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseDataTableServiceRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class DataTableServiceRestInterceptor: + """Interceptor for DataTableService. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. 
+ Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the DataTableServiceRestTransport. + + .. code-block:: python + class MyCustomDataTableServiceInterceptor(DataTableServiceRestInterceptor): + def pre_bulk_create_data_table_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_create_data_table_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_get_data_table_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_get_data_table_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_replace_data_table_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_replace_data_table_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_bulk_update_data_table_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_bulk_update_data_table_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_data_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_create_data_table_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_data_table_row(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_data_table(self, request, 
metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_delete_data_table_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def pre_get_data_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_table_operation_errors(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_table_operation_errors(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_data_table_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_data_table_row(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_table_rows(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_table_rows(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_data_tables(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_data_tables(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_table(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_table(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_update_data_table_row(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_data_table_row(self, response): + logging.log(f"Received response: {response}") + return response + + transport = 
DataTableServiceRestTransport(interceptor=MyCustomDataTableServiceInterceptor()) + client = DataTableServiceClient(transport=transport) + + + """ + + def pre_bulk_create_data_table_rows( + self, + request: data_table.BulkCreateDataTableRowsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkCreateDataTableRowsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for bulk_create_data_table_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_bulk_create_data_table_rows( + self, response: data_table.BulkCreateDataTableRowsResponse + ) -> data_table.BulkCreateDataTableRowsResponse: + """Post-rpc interceptor for bulk_create_data_table_rows + + DEPRECATED. Please use the `post_bulk_create_data_table_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_bulk_create_data_table_rows` interceptor runs + before the `post_bulk_create_data_table_rows_with_metadata` interceptor. + """ + return response + + def post_bulk_create_data_table_rows_with_metadata( + self, + response: data_table.BulkCreateDataTableRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkCreateDataTableRowsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for bulk_create_data_table_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_bulk_create_data_table_rows_with_metadata` + interceptor in new development instead of the `post_bulk_create_data_table_rows` interceptor. 
+ When both interceptors are used, this `post_bulk_create_data_table_rows_with_metadata` interceptor runs after the + `post_bulk_create_data_table_rows` interceptor. The (possibly modified) response returned by + `post_bulk_create_data_table_rows` will be passed to + `post_bulk_create_data_table_rows_with_metadata`. + """ + return response, metadata + + def pre_bulk_get_data_table_rows( + self, + request: data_table.BulkGetDataTableRowsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkGetDataTableRowsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for bulk_get_data_table_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_bulk_get_data_table_rows( + self, response: data_table.BulkGetDataTableRowsResponse + ) -> data_table.BulkGetDataTableRowsResponse: + """Post-rpc interceptor for bulk_get_data_table_rows + + DEPRECATED. Please use the `post_bulk_get_data_table_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_bulk_get_data_table_rows` interceptor runs + before the `post_bulk_get_data_table_rows_with_metadata` interceptor. + """ + return response + + def post_bulk_get_data_table_rows_with_metadata( + self, + response: data_table.BulkGetDataTableRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkGetDataTableRowsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for bulk_get_data_table_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_bulk_get_data_table_rows_with_metadata` + interceptor in new development instead of the `post_bulk_get_data_table_rows` interceptor. + When both interceptors are used, this `post_bulk_get_data_table_rows_with_metadata` interceptor runs after the + `post_bulk_get_data_table_rows` interceptor. The (possibly modified) response returned by + `post_bulk_get_data_table_rows` will be passed to + `post_bulk_get_data_table_rows_with_metadata`. + """ + return response, metadata + + def pre_bulk_replace_data_table_rows( + self, + request: data_table.BulkReplaceDataTableRowsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkReplaceDataTableRowsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for bulk_replace_data_table_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_bulk_replace_data_table_rows( + self, response: data_table.BulkReplaceDataTableRowsResponse + ) -> data_table.BulkReplaceDataTableRowsResponse: + """Post-rpc interceptor for bulk_replace_data_table_rows + + DEPRECATED. Please use the `post_bulk_replace_data_table_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_bulk_replace_data_table_rows` interceptor runs + before the `post_bulk_replace_data_table_rows_with_metadata` interceptor. 
+ """ + return response + + def post_bulk_replace_data_table_rows_with_metadata( + self, + response: data_table.BulkReplaceDataTableRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkReplaceDataTableRowsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for bulk_replace_data_table_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_bulk_replace_data_table_rows_with_metadata` + interceptor in new development instead of the `post_bulk_replace_data_table_rows` interceptor. + When both interceptors are used, this `post_bulk_replace_data_table_rows_with_metadata` interceptor runs after the + `post_bulk_replace_data_table_rows` interceptor. The (possibly modified) response returned by + `post_bulk_replace_data_table_rows` will be passed to + `post_bulk_replace_data_table_rows_with_metadata`. + """ + return response, metadata + + def pre_bulk_update_data_table_rows( + self, + request: data_table.BulkUpdateDataTableRowsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkUpdateDataTableRowsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for bulk_update_data_table_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_bulk_update_data_table_rows( + self, response: data_table.BulkUpdateDataTableRowsResponse + ) -> data_table.BulkUpdateDataTableRowsResponse: + """Post-rpc interceptor for bulk_update_data_table_rows + + DEPRECATED. Please use the `post_bulk_update_data_table_rows_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_bulk_update_data_table_rows` interceptor runs + before the `post_bulk_update_data_table_rows_with_metadata` interceptor. + """ + return response + + def post_bulk_update_data_table_rows_with_metadata( + self, + response: data_table.BulkUpdateDataTableRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.BulkUpdateDataTableRowsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for bulk_update_data_table_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_bulk_update_data_table_rows_with_metadata` + interceptor in new development instead of the `post_bulk_update_data_table_rows` interceptor. + When both interceptors are used, this `post_bulk_update_data_table_rows_with_metadata` interceptor runs after the + `post_bulk_update_data_table_rows` interceptor. The (possibly modified) response returned by + `post_bulk_update_data_table_rows` will be passed to + `post_bulk_update_data_table_rows_with_metadata`. + """ + return response, metadata + + def pre_create_data_table( + self, + request: gcc_data_table.CreateDataTableRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcc_data_table.CreateDataTableRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_data_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_create_data_table( + self, response: gcc_data_table.DataTable + ) -> gcc_data_table.DataTable: + """Post-rpc interceptor for create_data_table + + DEPRECATED. 
Please use the `post_create_data_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_create_data_table` interceptor runs + before the `post_create_data_table_with_metadata` interceptor. + """ + return response + + def post_create_data_table_with_metadata( + self, + response: gcc_data_table.DataTable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcc_data_table.DataTable, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_create_data_table_with_metadata` + interceptor in new development instead of the `post_create_data_table` interceptor. + When both interceptors are used, this `post_create_data_table_with_metadata` interceptor runs after the + `post_create_data_table` interceptor. The (possibly modified) response returned by + `post_create_data_table` will be passed to + `post_create_data_table_with_metadata`. + """ + return response, metadata + + def pre_create_data_table_row( + self, + request: data_table.CreateDataTableRowRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.CreateDataTableRowRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_data_table_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_create_data_table_row( + self, response: data_table.DataTableRow + ) -> data_table.DataTableRow: + """Post-rpc interceptor for create_data_table_row + + DEPRECATED. Please use the `post_create_data_table_row_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_create_data_table_row` interceptor runs + before the `post_create_data_table_row_with_metadata` interceptor. + """ + return response + + def post_create_data_table_row_with_metadata( + self, + response: data_table.DataTableRow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_table.DataTableRow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_data_table_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_create_data_table_row_with_metadata` + interceptor in new development instead of the `post_create_data_table_row` interceptor. + When both interceptors are used, this `post_create_data_table_row_with_metadata` interceptor runs after the + `post_create_data_table_row` interceptor. The (possibly modified) response returned by + `post_create_data_table_row` will be passed to + `post_create_data_table_row_with_metadata`. + """ + return response, metadata + + def pre_delete_data_table( + self, + request: data_table.DeleteDataTableRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.DeleteDataTableRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_data_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. 
+ """ + return request, metadata + + def pre_delete_data_table_row( + self, + request: data_table.DeleteDataTableRowRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.DeleteDataTableRowRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_data_table_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def pre_get_data_table( + self, + request: data_table.GetDataTableRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_table.GetDataTableRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for get_data_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_get_data_table( + self, response: data_table.DataTable + ) -> data_table.DataTable: + """Post-rpc interceptor for get_data_table + + DEPRECATED. Please use the `post_get_data_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_get_data_table` interceptor runs + before the `post_get_data_table_with_metadata` interceptor. + """ + return response + + def post_get_data_table_with_metadata( + self, + response: data_table.DataTable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_table.DataTable, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. + + We recommend only using this `post_get_data_table_with_metadata` + interceptor in new development instead of the `post_get_data_table` interceptor. 
+ When both interceptors are used, this `post_get_data_table_with_metadata` interceptor runs after the + `post_get_data_table` interceptor. The (possibly modified) response returned by + `post_get_data_table` will be passed to + `post_get_data_table_with_metadata`. + """ + return response, metadata + + def pre_get_data_table_operation_errors( + self, + request: data_table.GetDataTableOperationErrorsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.GetDataTableOperationErrorsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_data_table_operation_errors + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_get_data_table_operation_errors( + self, response: data_table.DataTableOperationErrors + ) -> data_table.DataTableOperationErrors: + """Post-rpc interceptor for get_data_table_operation_errors + + DEPRECATED. Please use the `post_get_data_table_operation_errors_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_get_data_table_operation_errors` interceptor runs + before the `post_get_data_table_operation_errors_with_metadata` interceptor. + """ + return response + + def post_get_data_table_operation_errors_with_metadata( + self, + response: data_table.DataTableOperationErrors, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.DataTableOperationErrors, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_data_table_operation_errors + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_get_data_table_operation_errors_with_metadata` + interceptor in new development instead of the `post_get_data_table_operation_errors` interceptor. + When both interceptors are used, this `post_get_data_table_operation_errors_with_metadata` interceptor runs after the + `post_get_data_table_operation_errors` interceptor. The (possibly modified) response returned by + `post_get_data_table_operation_errors` will be passed to + `post_get_data_table_operation_errors_with_metadata`. + """ + return response, metadata + + def pre_get_data_table_row( + self, + request: data_table.GetDataTableRowRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.GetDataTableRowRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_data_table_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_get_data_table_row( + self, response: data_table.DataTableRow + ) -> data_table.DataTableRow: + """Post-rpc interceptor for get_data_table_row + + DEPRECATED. Please use the `post_get_data_table_row_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_get_data_table_row` interceptor runs + before the `post_get_data_table_row_with_metadata` interceptor. + """ + return response + + def post_get_data_table_row_with_metadata( + self, + response: data_table.DataTableRow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_table.DataTableRow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_data_table_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_get_data_table_row_with_metadata` + interceptor in new development instead of the `post_get_data_table_row` interceptor. + When both interceptors are used, this `post_get_data_table_row_with_metadata` interceptor runs after the + `post_get_data_table_row` interceptor. The (possibly modified) response returned by + `post_get_data_table_row` will be passed to + `post_get_data_table_row_with_metadata`. + """ + return response, metadata + + def pre_list_data_table_rows( + self, + request: data_table.ListDataTableRowsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.ListDataTableRowsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_data_table_rows + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_list_data_table_rows( + self, response: data_table.ListDataTableRowsResponse + ) -> data_table.ListDataTableRowsResponse: + """Post-rpc interceptor for list_data_table_rows + + DEPRECATED. Please use the `post_list_data_table_rows_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_list_data_table_rows` interceptor runs + before the `post_list_data_table_rows_with_metadata` interceptor. + """ + return response + + def post_list_data_table_rows_with_metadata( + self, + response: data_table.ListDataTableRowsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.ListDataTableRowsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_table_rows + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_list_data_table_rows_with_metadata` + interceptor in new development instead of the `post_list_data_table_rows` interceptor. + When both interceptors are used, this `post_list_data_table_rows_with_metadata` interceptor runs after the + `post_list_data_table_rows` interceptor. The (possibly modified) response returned by + `post_list_data_table_rows` will be passed to + `post_list_data_table_rows_with_metadata`. + """ + return response, metadata + + def pre_list_data_tables( + self, + request: data_table.ListDataTablesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.ListDataTablesRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_data_tables + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_list_data_tables( + self, response: data_table.ListDataTablesResponse + ) -> data_table.ListDataTablesResponse: + """Post-rpc interceptor for list_data_tables + + DEPRECATED. Please use the `post_list_data_tables_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_list_data_tables` interceptor runs + before the `post_list_data_tables_with_metadata` interceptor. + """ + return response + + def post_list_data_tables_with_metadata( + self, + response: data_table.ListDataTablesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.ListDataTablesResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_data_tables + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_list_data_tables_with_metadata` + interceptor in new development instead of the `post_list_data_tables` interceptor. + When both interceptors are used, this `post_list_data_tables_with_metadata` interceptor runs after the + `post_list_data_tables` interceptor. The (possibly modified) response returned by + `post_list_data_tables` will be passed to + `post_list_data_tables_with_metadata`. + """ + return response, metadata + + def pre_update_data_table( + self, + request: gcc_data_table.UpdateDataTableRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + gcc_data_table.UpdateDataTableRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_data_table + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_update_data_table( + self, response: gcc_data_table.DataTable + ) -> gcc_data_table.DataTable: + """Post-rpc interceptor for update_data_table + + DEPRECATED. Please use the `post_update_data_table_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_update_data_table` interceptor runs + before the `post_update_data_table_with_metadata` interceptor. + """ + return response + + def post_update_data_table_with_metadata( + self, + response: gcc_data_table.DataTable, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[gcc_data_table.DataTable, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_table + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_update_data_table_with_metadata` + interceptor in new development instead of the `post_update_data_table` interceptor. + When both interceptors are used, this `post_update_data_table_with_metadata` interceptor runs after the + `post_update_data_table` interceptor. The (possibly modified) response returned by + `post_update_data_table` will be passed to + `post_update_data_table_with_metadata`. + """ + return response, metadata + + def pre_update_data_table_row( + self, + request: data_table.UpdateDataTableRowRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + data_table.UpdateDataTableRowRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_data_table_row + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_update_data_table_row( + self, response: data_table.DataTableRow + ) -> data_table.DataTableRow: + """Post-rpc interceptor for update_data_table_row + + DEPRECATED. Please use the `post_update_data_table_row_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. This `post_update_data_table_row` interceptor runs + before the `post_update_data_table_row_with_metadata` interceptor. + """ + return response + + def post_update_data_table_row_with_metadata( + self, + response: data_table.DataTableRow, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[data_table.DataTableRow, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_data_table_row + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the DataTableService server but before it is returned to user code. 
+ + We recommend only using this `post_update_data_table_row_with_metadata` + interceptor in new development instead of the `post_update_data_table_row` interceptor. + When both interceptors are used, this `post_update_data_table_row_with_metadata` interceptor runs after the + `post_update_data_table_row` interceptor. The (possibly modified) response returned by + `post_update_data_table_row` will be passed to + `post_update_data_table_row_with_metadata`. + """ + return response, metadata + + def pre_cancel_operation( + self, + request: operations_pb2.CancelOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.CancelOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_cancel_operation(self, response: None) -> None: + """Post-rpc interceptor for cancel_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. + """ + return response + + def pre_delete_operation( + self, + request: operations_pb2.DeleteOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.DeleteOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_delete_operation(self, response: None) -> None: + """Post-rpc interceptor for delete_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. 
+ """ + return response + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. + """ + return response + + def pre_list_operations( + self, + request: operations_pb2.ListOperationsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.ListOperationsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_operations + + Override in a subclass to manipulate the request or metadata + before they are sent to the DataTableService server. + """ + return request, metadata + + def post_list_operations( + self, response: operations_pb2.ListOperationsResponse + ) -> operations_pb2.ListOperationsResponse: + """Post-rpc interceptor for list_operations + + Override in a subclass to manipulate the response + after it is returned by the DataTableService server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class DataTableServiceRestStub: + _session: AuthorizedSession + _host: str + _interceptor: DataTableServiceRestInterceptor + + +class DataTableServiceRestTransport(_BaseDataTableServiceRestTransport): + """REST backend synchronous transport for DataTableService. + + DataTableManager provides an interface for managing data + tables. 
+ + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "chronicle.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[DataTableServiceRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'chronicle.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[DataTableServiceRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or DataTableServiceRestInterceptor() + self._prep_wrapped_messages(client_info) + + class _BulkCreateDataTableRows( + _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.BulkCreateDataTableRows") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_table.BulkCreateDataTableRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkCreateDataTableRowsResponse: + r"""Call the bulk create data table + rows method over HTTP. + + Args: + request (~.data_table.BulkCreateDataTableRowsRequest): + The request object. Request to create data table rows in + bulk. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.BulkCreateDataTableRowsResponse: + Response message with created data + table rows. + + """ + + http_options = _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows._get_http_options() + + request, metadata = self._interceptor.pre_bulk_create_data_table_rows( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.BulkCreateDataTableRows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkCreateDataTableRows", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DataTableServiceRestTransport._BulkCreateDataTableRows._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + 
body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.BulkCreateDataTableRowsResponse() + pb_resp = data_table.BulkCreateDataTableRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_bulk_create_data_table_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_create_data_table_rows_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + data_table.BulkCreateDataTableRowsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.bulk_create_data_table_rows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkCreateDataTableRows", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BulkGetDataTableRows( + _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.BulkGetDataTableRows") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + 
params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_table.BulkGetDataTableRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkGetDataTableRowsResponse: + r"""Call the bulk get data table rows method over HTTP. + + Args: + request (~.data_table.BulkGetDataTableRowsRequest): + The request object. Request to get data table rows in + bulk. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.BulkGetDataTableRowsResponse: + Response message with data table + rows. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows._get_http_options() + + request, metadata = self._interceptor.pre_bulk_get_data_table_rows( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.BulkGetDataTableRows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkGetDataTableRows", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DataTableServiceRestTransport._BulkGetDataTableRows._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.BulkGetDataTableRowsResponse() + pb_resp = data_table.BulkGetDataTableRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_bulk_get_data_table_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_get_data_table_rows_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.BulkGetDataTableRowsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.bulk_get_data_table_rows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkGetDataTableRows", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BulkReplaceDataTableRows( + _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.BulkReplaceDataTableRows") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
data_table.BulkReplaceDataTableRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkReplaceDataTableRowsResponse: + r"""Call the bulk replace data table + rows method over HTTP. + + Args: + request (~.data_table.BulkReplaceDataTableRowsRequest): + The request object. Request to replace data table rows in + bulk. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.BulkReplaceDataTableRowsResponse: + Response message with data table rows + that replaced existing data table rows. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows._get_http_options() + + request, metadata = self._interceptor.pre_bulk_replace_data_table_rows( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.BulkReplaceDataTableRows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkReplaceDataTableRows", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DataTableServiceRestTransport._BulkReplaceDataTableRows._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.BulkReplaceDataTableRowsResponse() + pb_resp = data_table.BulkReplaceDataTableRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_bulk_replace_data_table_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_replace_data_table_rows_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + data_table.BulkReplaceDataTableRowsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.bulk_replace_data_table_rows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkReplaceDataTableRows", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _BulkUpdateDataTableRows( + _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.BulkUpdateDataTableRows") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def 
__call__( + self, + request: data_table.BulkUpdateDataTableRowsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.BulkUpdateDataTableRowsResponse: + r"""Call the bulk update data table + rows method over HTTP. + + Args: + request (~.data_table.BulkUpdateDataTableRowsRequest): + The request object. Request to update data table rows in + bulk. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.BulkUpdateDataTableRowsResponse: + Response message with updated data + table rows. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows._get_http_options() + + request, metadata = self._interceptor.pre_bulk_update_data_table_rows( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.BulkUpdateDataTableRows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkUpdateDataTableRows", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + DataTableServiceRestTransport._BulkUpdateDataTableRows._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.BulkUpdateDataTableRowsResponse() + pb_resp = data_table.BulkUpdateDataTableRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_bulk_update_data_table_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_bulk_update_data_table_rows_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + data_table.BulkUpdateDataTableRowsResponse.to_json(response) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.bulk_update_data_table_rows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "BulkUpdateDataTableRows", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDataTable( + _BaseDataTableServiceRestTransport._BaseCreateDataTable, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.CreateDataTable") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: 
gcc_data_table.CreateDataTableRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Call the create data table method over HTTP. + + Args: + request (~.gcc_data_table.CreateDataTableRequest): + The request object. A request to create DataTable. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcc_data_table.DataTable: + DataTable represents the data table + resource. + + """ + + http_options = _BaseDataTableServiceRestTransport._BaseCreateDataTable._get_http_options() + + request, metadata = self._interceptor.pre_create_data_table( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseCreateDataTable._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseCreateDataTable._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseCreateDataTable._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.chronicle_v1.DataTableServiceClient.CreateDataTable", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "CreateDataTable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._CreateDataTable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_data_table.DataTable() + pb_resp = gcc_data_table.DataTable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_data_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_data_table_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcc_data_table.DataTable.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.create_data_table", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "CreateDataTable", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _CreateDataTableRow( + _BaseDataTableServiceRestTransport._BaseCreateDataTableRow, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.CreateDataTableRow") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, 
+ timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_table.CreateDataTableRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Call the create data table row method over HTTP. + + Args: + request (~.data_table.CreateDataTableRowRequest): + The request object. Request to create data table row. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.DataTableRow: + DataTableRow represents a single row + in a data table. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseCreateDataTableRow._get_http_options() + + request, metadata = self._interceptor.pre_create_data_table_row( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseCreateDataTableRow._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseCreateDataTableRow._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseCreateDataTableRow._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.CreateDataTableRow", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "CreateDataTableRow", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._CreateDataTableRow._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            if response.status_code >= 400:
                # Map the raw HTTP error onto the matching
                # core_exceptions.GoogleAPICallError subclass.
                raise core_exceptions.from_http_response(response)

            # Return the response
            # pb() exposes the underlying protobuf of `resp`, so Parse below
            # fills `resp` in place via `pb_resp`.
            resp = data_table.DataTableRow()
            pb_resp = data_table.DataTableRow.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)

            # Let user interceptors post-process the message, then the
            # (response, metadata) pair; the returned metadata is discarded here.
            resp = self._interceptor.post_create_data_table_row(resp)
            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
            resp, _ = self._interceptor.post_create_data_table_row_with_metadata(
                resp, response_metadata
            )
            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
                logging.DEBUG
            ):  # pragma: NO COVER
                try:
                    # NOTE(review): `response` is the raw requests.Response, not
                    # the parsed proto `resp`; to_json() presumably cannot
                    # serialize it, so the logged payload falls back to None —
                    # confirm against the upstream generator.
                    response_payload = data_table.DataTableRow.to_json(response)
                except:
                    # Best-effort logging only; never let it break the call.
                    response_payload = None
                http_response = {
                    "payload": response_payload,
                    "headers": dict(response.headers),
                    "status": response.status_code,
                }
                _LOGGER.debug(
                    "Received response for google.cloud.chronicle_v1.DataTableServiceClient.create_data_table_row",
                    extra={
                        "serviceName": "google.cloud.chronicle.v1.DataTableService",
                        "rpcName": "CreateDataTableRow",
                        "metadata": http_response["headers"],
                        "httpResponse": http_response,
                    },
                )
            return resp

        # REST stub for the DataTableService.DeleteDataTable RPC.
        class _DeleteDataTable(
            _BaseDataTableServiceRestTransport._BaseDeleteDataTable,
            DataTableServiceRestStub,
        ):
            def __hash__(self):
                return hash("DataTableServiceRestTransport.DeleteDataTable")

            @staticmethod
            def _get_response(
                host,
                metadata,
                query_params,
                session,
                timeout,
                transcoded_request,
                body=None,
            ):
                # Issue the transcoded HTTP request; the verb is chosen
                # dynamically from the transcoding result. No request body is
                # sent — `body` exists only for signature parity with the
                # other stubs.
                uri = transcoded_request["uri"]
                method = transcoded_request["method"]
                headers = dict(metadata)
                headers["Content-Type"] = "application/json"
                response = getattr(session, method)(
                    "{host}{uri}".format(host=host, uri=uri),
                    timeout=timeout,
                    headers=headers,
                    params=rest_helpers.flatten_query_params(query_params, strict=True),
                )
                return response

            def __call__(
                self,
                request: data_table.DeleteDataTableRequest,
                *,
                retry: OptionalRetry = gapic_v1.method.DEFAULT,
                timeout:
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete data table method over HTTP. + + Args: + request (~.data_table.DeleteDataTableRequest): + The request object. Request message for deleting data + tables. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = _BaseDataTableServiceRestTransport._BaseDeleteDataTable._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_table( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseDeleteDataTable._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseDeleteDataTable._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.DeleteDataTable", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "DeleteDataTable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
DataTableServiceRestTransport._DeleteDataTable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeleteDataTableRow( + _BaseDataTableServiceRestTransport._BaseDeleteDataTableRow, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.DeleteDataTableRow") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_table.DeleteDataTableRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete data table row method over HTTP. + + Args: + request (~.data_table.DeleteDataTableRowRequest): + The request object. Request to delete data table row. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDataTableServiceRestTransport._BaseDeleteDataTableRow._get_http_options() + + request, metadata = self._interceptor.pre_delete_data_table_row( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseDeleteDataTableRow._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseDeleteDataTableRow._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.DeleteDataTableRow", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "DeleteDataTableRow", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._DeleteDataTableRow._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            if response.status_code >= 400:
                # Delete returns no payload; surface HTTP errors as the
                # matching core_exceptions.GoogleAPICallError subclass and
                # otherwise fall through (implicit None return).
                raise core_exceptions.from_http_response(response)

        # REST stub for the DataTableService.GetDataTable RPC.
        class _GetDataTable(
            _BaseDataTableServiceRestTransport._BaseGetDataTable, DataTableServiceRestStub
        ):
            def __hash__(self):
                return hash("DataTableServiceRestTransport.GetDataTable")

            @staticmethod
            def _get_response(
                host,
                metadata,
                query_params,
                session,
                timeout,
                transcoded_request,
                body=None,
            ):
                # Issue the transcoded HTTP request (GET — no body is sent;
                # `body` exists only for signature parity with the other stubs).
                uri = transcoded_request["uri"]
                method = transcoded_request["method"]
                headers = dict(metadata)
                headers["Content-Type"] = "application/json"
                response = getattr(session, method)(
                    "{host}{uri}".format(host=host, uri=uri),
                    timeout=timeout,
                    headers=headers,
                    params=rest_helpers.flatten_query_params(query_params, strict=True),
                )
                return response

            def __call__(
                self,
                request: data_table.GetDataTableRequest,
                *,
                retry: OptionalRetry = gapic_v1.method.DEFAULT,
                timeout: Optional[float] = None,
                metadata: Sequence[Tuple[str, Union[str, bytes]]] = (),
            ) -> data_table.DataTable:
                r"""Call the get data table method over HTTP.

                Args:
                    request (~.data_table.GetDataTableRequest):
                        The request object. A request to get details about a data
                        table.
                    retry (google.api_core.retry.Retry): Designation of what errors, if any,
                        should be retried.
                    timeout (float): The timeout for this request.
                    metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be
                        sent along with the request as metadata. Normally, each value must be of type `str`,
                        but for metadata keys ending with the suffix `-bin`, the corresponding values must
                        be of type `bytes`.

                Returns:
                    ~.data_table.DataTable:
                        DataTable represents the data table
                        resource.
+ + """ + + http_options = ( + _BaseDataTableServiceRestTransport._BaseGetDataTable._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_data_table(request, metadata) + transcoded_request = _BaseDataTableServiceRestTransport._BaseGetDataTable._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseGetDataTable._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.GetDataTable", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetDataTable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._GetDataTable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.DataTable() + pb_resp = data_table.DataTable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_data_table_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.DataTable.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.get_data_table", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetDataTable", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataTableOperationErrors( + _BaseDataTableServiceRestTransport._BaseGetDataTableOperationErrors, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.GetDataTableOperationErrors") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_table.GetDataTableOperationErrorsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + 
timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableOperationErrors: + r"""Call the get data table operation + errors method over HTTP. + + Args: + request (~.data_table.GetDataTableOperationErrorsRequest): + The request object. The request message for + GetDataTableOperationErrors. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.DataTableOperationErrors: + The message containing the errors for + a data table operation. + + """ + + http_options = _BaseDataTableServiceRestTransport._BaseGetDataTableOperationErrors._get_http_options() + + request, metadata = self._interceptor.pre_get_data_table_operation_errors( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseGetDataTableOperationErrors._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseGetDataTableOperationErrors._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.chronicle_v1.DataTableServiceClient.GetDataTableOperationErrors", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetDataTableOperationErrors", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._GetDataTableOperationErrors._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.DataTableOperationErrors() + pb_resp = data_table.DataTableOperationErrors.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_data_table_operation_errors(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = ( + self._interceptor.post_get_data_table_operation_errors_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.DataTableOperationErrors.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.get_data_table_operation_errors", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetDataTableOperationErrors", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetDataTableRow( + _BaseDataTableServiceRestTransport._BaseGetDataTableRow, + DataTableServiceRestStub, + ): + def __hash__(self): + return 
hash("DataTableServiceRestTransport.GetDataTableRow") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_table.GetDataTableRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Call the get data table row method over HTTP. + + Args: + request (~.data_table.GetDataTableRowRequest): + The request object. Request to get data table row. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.DataTableRow: + DataTableRow represents a single row + in a data table. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseGetDataTableRow._get_http_options() + + request, metadata = self._interceptor.pre_get_data_table_row( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseGetDataTableRow._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseGetDataTableRow._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.GetDataTableRow", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetDataTableRow", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._GetDataTableRow._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
            if response.status_code >= 400:
                # Map the raw HTTP error onto the matching
                # core_exceptions.GoogleAPICallError subclass.
                raise core_exceptions.from_http_response(response)

            # Return the response
            # pb() exposes the underlying protobuf of `resp`, so Parse below
            # fills `resp` in place via `pb_resp`.
            resp = data_table.DataTableRow()
            pb_resp = data_table.DataTableRow.pb(resp)

            json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True)

            # Let user interceptors post-process the message, then the
            # (response, metadata) pair; the returned metadata is discarded here.
            resp = self._interceptor.post_get_data_table_row(resp)
            response_metadata = [(k, str(v)) for k, v in response.headers.items()]
            resp, _ = self._interceptor.post_get_data_table_row_with_metadata(
                resp, response_metadata
            )
            if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
                logging.DEBUG
            ):  # pragma: NO COVER
                try:
                    # NOTE(review): `response` is the raw requests.Response, not
                    # the parsed proto `resp`; to_json() presumably cannot
                    # serialize it, so the logged payload falls back to None —
                    # confirm against the upstream generator.
                    response_payload = data_table.DataTableRow.to_json(response)
                except:
                    # Best-effort logging only; never let it break the call.
                    response_payload = None
                http_response = {
                    "payload": response_payload,
                    "headers": dict(response.headers),
                    "status": response.status_code,
                }
                _LOGGER.debug(
                    "Received response for google.cloud.chronicle_v1.DataTableServiceClient.get_data_table_row",
                    extra={
                        "serviceName": "google.cloud.chronicle.v1.DataTableService",
                        "rpcName": "GetDataTableRow",
                        "metadata": http_response["headers"],
                        "httpResponse": http_response,
                    },
                )
            return resp

        # REST stub for the DataTableService.ListDataTableRows RPC.
        class _ListDataTableRows(
            _BaseDataTableServiceRestTransport._BaseListDataTableRows,
            DataTableServiceRestStub,
        ):
            def __hash__(self):
                return hash("DataTableServiceRestTransport.ListDataTableRows")

            @staticmethod
            def _get_response(
                host,
                metadata,
                query_params,
                session,
                timeout,
                transcoded_request,
                body=None,
            ):
                # Issue the transcoded HTTP request (GET — no body is sent;
                # `body` exists only for signature parity with the other stubs).
                uri = transcoded_request["uri"]
                method = transcoded_request["method"]
                headers = dict(metadata)
                headers["Content-Type"] = "application/json"
                response = getattr(session, method)(
                    "{host}{uri}".format(host=host, uri=uri),
                    timeout=timeout,
                    headers=headers,
                    params=rest_helpers.flatten_query_params(query_params, strict=True),
                )
                return response

            def __call__(
                self,
                request: data_table.ListDataTableRowsRequest,
                *,
                retry: OptionalRetry = gapic_v1.method.DEFAULT,
                timeout:
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.ListDataTableRowsResponse: + r"""Call the list data table rows method over HTTP. + + Args: + request (~.data_table.ListDataTableRowsRequest): + The request object. Request to list data table rows. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.ListDataTableRowsResponse: + Response message for listing data + table rows. + + """ + + http_options = _BaseDataTableServiceRestTransport._BaseListDataTableRows._get_http_options() + + request, metadata = self._interceptor.pre_list_data_table_rows( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseListDataTableRows._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseListDataTableRows._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.ListDataTableRows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListDataTableRows", + 
"httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._ListDataTableRows._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.ListDataTableRowsResponse() + pb_resp = data_table.ListDataTableRowsResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_data_table_rows(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_table_rows_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.ListDataTableRowsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.list_data_table_rows", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListDataTableRows", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListDataTables( + _BaseDataTableServiceRestTransport._BaseListDataTables, DataTableServiceRestStub + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.ListDataTables") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = 
dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: data_table.ListDataTablesRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.ListDataTablesResponse: + r"""Call the list data tables method over HTTP. + + Args: + request (~.data_table.ListDataTablesRequest): + The request object. A request for a list of data tables. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.ListDataTablesResponse: + Response message for listing data + tables. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseListDataTables._get_http_options() + + request, metadata = self._interceptor.pre_list_data_tables( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseListDataTables._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseListDataTables._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.ListDataTables", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListDataTables", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._ListDataTables._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.ListDataTablesResponse() + pb_resp = data_table.ListDataTablesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_data_tables(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_data_tables_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.ListDataTablesResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.list_data_tables", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListDataTables", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDataTable( + _BaseDataTableServiceRestTransport._BaseUpdateDataTable, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.UpdateDataTable") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: gcc_data_table.UpdateDataTableRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> gcc_data_table.DataTable: + r"""Call the update data table method over HTTP. + + Args: + request (~.gcc_data_table.UpdateDataTableRequest): + The request object. A request to update details of data + table. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.gcc_data_table.DataTable: + DataTable represents the data table + resource. + + """ + + http_options = _BaseDataTableServiceRestTransport._BaseUpdateDataTable._get_http_options() + + request, metadata = self._interceptor.pre_update_data_table( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseUpdateDataTable._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseUpdateDataTable._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseUpdateDataTable._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for 
google.cloud.chronicle_v1.DataTableServiceClient.UpdateDataTable", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "UpdateDataTable", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._UpdateDataTable._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = gcc_data_table.DataTable() + pb_resp = gcc_data_table.DataTable.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_table(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_table_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = gcc_data_table.DataTable.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.update_data_table", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "UpdateDataTable", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateDataTableRow( + _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.UpdateDataTableRow") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, 
+ timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: data_table.UpdateDataTableRowRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> data_table.DataTableRow: + r"""Call the update data table row method over HTTP. + + Args: + request (~.data_table.UpdateDataTableRowRequest): + The request object. Request to update data table row. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.data_table.DataTableRow: + DataTableRow represents a single row + in a data table. 
+ + """ + + http_options = _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow._get_http_options() + + request, metadata = self._interceptor.pre_update_data_table_row( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.UpdateDataTableRow", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "UpdateDataTableRow", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._UpdateDataTableRow._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = data_table.DataTableRow() + pb_resp = data_table.DataTableRow.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_data_table_row(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_data_table_row_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = data_table.DataTableRow.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceClient.update_data_table_row", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "UpdateDataTableRow", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def bulk_create_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkCreateDataTableRowsRequest], + data_table.BulkCreateDataTableRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BulkCreateDataTableRows( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def bulk_get_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkGetDataTableRowsRequest], + data_table.BulkGetDataTableRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._BulkGetDataTableRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def bulk_replace_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkReplaceDataTableRowsRequest], + data_table.BulkReplaceDataTableRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BulkReplaceDataTableRows( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def bulk_update_data_table_rows( + self, + ) -> Callable[ + [data_table.BulkUpdateDataTableRowsRequest], + data_table.BulkUpdateDataTableRowsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._BulkUpdateDataTableRows( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def create_data_table( + self, + ) -> Callable[[gcc_data_table.CreateDataTableRequest], gcc_data_table.DataTable]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def create_data_table_row( + self, + ) -> Callable[[data_table.CreateDataTableRowRequest], data_table.DataTableRow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateDataTableRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_table( + self, + ) -> Callable[[data_table.DeleteDataTableRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteDataTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_data_table_row( + self, + ) -> Callable[[data_table.DeleteDataTableRowRequest], empty_pb2.Empty]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDataTableRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_table( + self, + ) -> Callable[[data_table.GetDataTableRequest], data_table.DataTable]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_data_table_operation_errors( + self, + ) -> Callable[ + [data_table.GetDataTableOperationErrorsRequest], + data_table.DataTableOperationErrors, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataTableOperationErrors( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def get_data_table_row( + self, + ) -> Callable[[data_table.GetDataTableRowRequest], data_table.DataTableRow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDataTableRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_table_rows( + self, + ) -> Callable[ + [data_table.ListDataTableRowsRequest], data_table.ListDataTableRowsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDataTableRows(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_data_tables( + self, + ) -> Callable[ + [data_table.ListDataTablesRequest], data_table.ListDataTablesResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDataTables(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_table( + self, + ) -> Callable[[gcc_data_table.UpdateDataTableRequest], gcc_data_table.DataTable]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._UpdateDataTable(self._session, self._host, self._interceptor) # type: ignore + + @property + def update_data_table_row( + self, + ) -> Callable[[data_table.UpdateDataTableRowRequest], data_table.DataTableRow]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDataTableRow(self._session, self._host, self._interceptor) # type: ignore + + @property + def cancel_operation(self): + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + + class _CancelOperation( + _BaseDataTableServiceRestTransport._BaseCancelOperation, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.CancelOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: operations_pb2.CancelOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the cancel operation method over HTTP. + + Args: + request (operations_pb2.CancelOperationRequest): + The request object for CancelOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDataTableServiceRestTransport._BaseCancelOperation._get_http_options() + + request, metadata = self._interceptor.pre_cancel_operation( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseCancelOperation._get_transcoded_request( + http_options, request + ) + + body = _BaseDataTableServiceRestTransport._BaseCancelOperation._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseCancelOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.CancelOperation", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "CancelOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._CancelOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_cancel_operation(None) + + @property + def delete_operation(self): + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + + class _DeleteOperation( + _BaseDataTableServiceRestTransport._BaseDeleteOperation, + DataTableServiceRestStub, + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.DeleteOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.DeleteOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> None: + r"""Call the delete operation method over HTTP. + + Args: + request (operations_pb2.DeleteOperationRequest): + The request object for DeleteOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + + http_options = _BaseDataTableServiceRestTransport._BaseDeleteOperation._get_http_options() + + request, metadata = self._interceptor.pre_delete_operation( + request, metadata + ) + transcoded_request = _BaseDataTableServiceRestTransport._BaseDeleteOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseDeleteOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.DeleteOperation", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "DeleteOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._DeleteOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + return self._interceptor.post_delete_operation(None) + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseDataTableServiceRestTransport._BaseGetOperation, DataTableServiceRestStub + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseDataTableServiceRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseDataTableServiceRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.GetOperation", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceAsyncClient.GetOperation", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def list_operations(self): + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + + class _ListOperations( + _BaseDataTableServiceRestTransport._BaseListOperations, DataTableServiceRestStub + ): + def __hash__(self): + return hash("DataTableServiceRestTransport.ListOperations") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.ListOperationsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.ListOperationsResponse: + 
r"""Call the list operations method over HTTP. + + Args: + request (operations_pb2.ListOperationsRequest): + The request object for ListOperations method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.ListOperationsResponse: Response from ListOperations method. + """ + + http_options = _BaseDataTableServiceRestTransport._BaseListOperations._get_http_options() + + request, metadata = self._interceptor.pre_list_operations(request, metadata) + transcoded_request = _BaseDataTableServiceRestTransport._BaseListOperations._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseDataTableServiceRestTransport._BaseListOperations._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.chronicle_v1.DataTableServiceClient.ListOperations", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListOperations", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = DataTableServiceRestTransport._ListOperations._get_response( + self._host, 
+ metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.ListOperationsResponse() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_list_operations(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.chronicle_v1.DataTableServiceAsyncClient.ListOperations", + extra={ + "serviceName": "google.cloud.chronicle.v1.DataTableService", + "rpcName": "ListOperations", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("DataTableServiceRestTransport",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest_base.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest_base.py new file mode 100644 index 000000000000..80a4e8054c4f --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/services/data_table_service/transports/rest_base.py @@ -0,0 +1,987 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +from google.api_core import gapic_v1, path_template +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +from .base import DEFAULT_CLIENT_INFO, DataTableServiceTransport + + +class _BaseDataTableServiceRestTransport(DataTableServiceTransport): + """Base REST backend transport for DataTableService. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "chronicle.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'chronicle.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. 
These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseBulkCreateDataTableRows: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkCreate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.BulkCreateDataTableRowsRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseBulkCreateDataTableRows._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBulkGetDataTableRows: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkGet", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.BulkGetDataTableRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, 
+ ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseBulkGetDataTableRows._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBulkReplaceDataTableRows: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkReplace", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.BulkReplaceDataTableRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseBulkReplaceDataTableRows._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseBulkUpdateDataTableRows: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + 
return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkUpdate", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.BulkUpdateDataTableRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseBulkUpdateDataTableRows._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDataTable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "dataTableId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*}/dataTables", + "body": "data_table", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcc_data_table.CreateDataTableRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseCreateDataTable._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseCreateDataTableRow: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows", + "body": "data_table_row", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.CreateDataTableRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) 
+ query_params.update( + _BaseDataTableServiceRestTransport._BaseCreateDataTableRow._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDataTable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*/dataTables/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.DeleteDataTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseDeleteDataTable._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteDataTableRow: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": 
"/v1/{name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.DeleteDataTableRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseDeleteDataTableRow._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataTable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/dataTables/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.GetDataTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseGetDataTable._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class 
_BaseGetDataTableOperationErrors: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/dataTableOperationErrors/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.GetDataTableOperationErrorsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseGetDataTableOperationErrors._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDataTableRow: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.GetDataTableRowRequest.pb(request) + 
transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseGetDataTableRow._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataTableRows: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.ListDataTableRowsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseListDataTableRows._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDataTables: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def 
_get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/instances/*}/dataTables", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.ListDataTablesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseListDataTables._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataTable: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{data_table.name=projects/*/locations/*/instances/*/dataTables/*}", + "body": "data_table", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = gcc_data_table.UpdateDataTableRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( 
+ transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseUpdateDataTable._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateDataTableRow: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{data_table_row.name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}", + "body": "data_table_row", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = data_table.UpdateDataTableRowRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseDataTableServiceRestTransport._BaseUpdateDataTableRow._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + 
class _BaseCancelOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/instances/*/operations/*}:cancel", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + body = json.dumps(transcoded_request["body"]) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseDeleteOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/instances/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*/operations/*}", + }, + ] + return http_options + + 
@staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + class _BaseListOperations: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/instances/*}/operations", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseDataTableServiceRestTransport",) diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/__init__.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/__init__.py index 00886c66dedf..376b5dc19a2c 100644 --- a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/__init__.py +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/__init__.py @@ -31,6 +31,35 @@ UpdateDataAccessLabelRequest, UpdateDataAccessScopeRequest, ) +from .data_table import ( + BulkCreateDataTableRowsRequest, + BulkCreateDataTableRowsResponse, + BulkGetDataTableRowsRequest, + BulkGetDataTableRowsResponse, + BulkReplaceDataTableRowsRequest, + BulkReplaceDataTableRowsResponse, + BulkUpdateDataTableRowsRequest, + BulkUpdateDataTableRowsResponse, + CreateDataTableRequest, + CreateDataTableRowRequest, + 
DataTable, + DataTableColumnInfo, + DataTableOperationErrors, + DataTableRow, + DataTableScopeInfo, + DataTableUpdateSource, + DeleteDataTableRequest, + DeleteDataTableRowRequest, + GetDataTableOperationErrorsRequest, + GetDataTableRequest, + GetDataTableRowRequest, + ListDataTableRowsRequest, + ListDataTableRowsResponse, + ListDataTablesRequest, + ListDataTablesResponse, + UpdateDataTableRequest, + UpdateDataTableRowRequest, +) from .entity import ( CreateWatchlistRequest, DeleteWatchlistRequest, @@ -105,6 +134,33 @@ "ListDataAccessScopesResponse", "UpdateDataAccessLabelRequest", "UpdateDataAccessScopeRequest", + "BulkCreateDataTableRowsRequest", + "BulkCreateDataTableRowsResponse", + "BulkGetDataTableRowsRequest", + "BulkGetDataTableRowsResponse", + "BulkReplaceDataTableRowsRequest", + "BulkReplaceDataTableRowsResponse", + "BulkUpdateDataTableRowsRequest", + "BulkUpdateDataTableRowsResponse", + "CreateDataTableRequest", + "CreateDataTableRowRequest", + "DataTable", + "DataTableColumnInfo", + "DataTableOperationErrors", + "DataTableRow", + "DataTableScopeInfo", + "DeleteDataTableRequest", + "DeleteDataTableRowRequest", + "GetDataTableOperationErrorsRequest", + "GetDataTableRequest", + "GetDataTableRowRequest", + "ListDataTableRowsRequest", + "ListDataTableRowsResponse", + "ListDataTablesRequest", + "ListDataTablesResponse", + "UpdateDataTableRequest", + "UpdateDataTableRowRequest", + "DataTableUpdateSource", "CreateWatchlistRequest", "DeleteWatchlistRequest", "GetWatchlistRequest", diff --git a/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/data_table.py b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/data_table.py new file mode 100644 index 000000000000..3864583d3d34 --- /dev/null +++ b/packages/google-cloud-chronicle/google/cloud/chronicle_v1/types/data_table.py @@ -0,0 +1,891 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this 
file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.rpc.status_pb2 as status_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.chronicle.v1", + manifest={ + "DataTableUpdateSource", + "CreateDataTableRequest", + "GetDataTableRequest", + "UpdateDataTableRequest", + "ListDataTablesRequest", + "DeleteDataTableRequest", + "ListDataTablesResponse", + "CreateDataTableRowRequest", + "UpdateDataTableRowRequest", + "ListDataTableRowsRequest", + "ListDataTableRowsResponse", + "GetDataTableRowRequest", + "DeleteDataTableRowRequest", + "BulkCreateDataTableRowsRequest", + "BulkCreateDataTableRowsResponse", + "BulkGetDataTableRowsRequest", + "BulkGetDataTableRowsResponse", + "BulkReplaceDataTableRowsRequest", + "BulkReplaceDataTableRowsResponse", + "BulkUpdateDataTableRowsRequest", + "BulkUpdateDataTableRowsResponse", + "DataTableScopeInfo", + "DataTable", + "DataTableRow", + "DataTableColumnInfo", + "GetDataTableOperationErrorsRequest", + "DataTableOperationErrors", + }, +) + + +class DataTableUpdateSource(proto.Enum): + r"""DataTableUpdateSource denotes the source that updated the + data table. + + Values: + DATA_TABLE_UPDATE_SOURCE_UNSPECIFIED (0): + The data table is updated by the user. + USER (1): + The data table is updated by the user. 
+ RULE (2): + The data table is updated by the rule. + SEARCH (3): + The data table is updated by the search. + """ + + DATA_TABLE_UPDATE_SOURCE_UNSPECIFIED = 0 + USER = 1 + RULE = 2 + SEARCH = 3 + + +class CreateDataTableRequest(proto.Message): + r"""A request to create DataTable. + + Attributes: + parent (str): + Required. The parent resource where this data + table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + data_table (google.cloud.chronicle_v1.types.DataTable): + Required. The data table being created. + data_table_id (str): + Required. The ID to use for the data table. + This is also the display name for the data + table. It must satisfy the following + requirements: + + - Starts with letter. + - Contains only letters, numbers and underscore. + - Must be unique and has length < 256. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_table: "DataTable" = proto.Field( + proto.MESSAGE, + number=2, + message="DataTable", + ) + data_table_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class GetDataTableRequest(proto.Message): + r"""A request to get details about a data table. + + Attributes: + name (str): + Required. The resource name of the data table to retrieve. + Format: + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateDataTableRequest(proto.Message): + r"""A request to update details of data table. + + Attributes: + data_table (google.cloud.chronicle_v1.types.DataTable): + Required. This field is used to identify the datatable to + update. Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of metadata fields to update. Currently + data tables only support updating the ``description``, + ``row_time_to_live`` and ``scope_info`` fields. 
When no + field mask is supplied, all non-empty fields will be + updated. A field mask of "\*" will update all fields, + whether empty or not. + """ + + data_table: "DataTable" = proto.Field( + proto.MESSAGE, + number=1, + message="DataTable", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListDataTablesRequest(proto.Message): + r"""A request for a list of data tables. + + Attributes: + parent (str): + Required. The parent resource where this data + table will be created. Format: + projects/{project}/locations/{location}/instances/{instance} + page_size (int): + Optional. The maximum number of data tables + to return. The service may return fewer than + this value. If unspecified, at most 100 data + tables will be returned. The maximum value is + 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListDataTables`` call. Provide this to retrieve the + subsequent page. When paginating, all other parameters + provided to ``ListDataTables`` must match the call that + provided the page token. + order_by (str): + Optional. Configures ordering of DataTables in the response. + Note: Our implementation currently supports order by + "create_time asc" only + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + + +class DeleteDataTableRequest(proto.Message): + r"""Request message for deleting data tables. + + Attributes: + name (str): + Required. The resource name of the data table to delete. + Format + projects/{project}/locations/{location}/instances/{instances}/dataTables/{data_table} + force (bool): + Optional. If set to true, any rows under this + data table will also be deleted. 
(Otherwise, the + request will only work if the data table has no + rows.) + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListDataTablesResponse(proto.Message): + r"""Response message for listing data tables. + + Attributes: + data_tables (MutableSequence[google.cloud.chronicle_v1.types.DataTable]): + The list of the data tables returned. + next_page_token (str): + A token, which can be sent as ``page_token`` to retrieve the + next page. If this field is omitted, there are no subsequent + pages. + """ + + @property + def raw_page(self): + return self + + data_tables: MutableSequence["DataTable"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTable", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateDataTableRowRequest(proto.Message): + r"""Request to create data table row. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + data_table_row (google.cloud.chronicle_v1.types.DataTableRow): + Required. The data table row to create. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + data_table_row: "DataTableRow" = proto.Field( + proto.MESSAGE, + number=2, + message="DataTableRow", + ) + + +class UpdateDataTableRowRequest(proto.Message): + r"""Request to update data table row. + + Attributes: + data_table_row (google.cloud.chronicle_v1.types.DataTableRow): + Required. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. The list of fields to update. Currently data table + rows only support updating the ``values`` field. When no + field mask is supplied, all non-empty fields will be + updated. 
A field mask of "\*" will update all fields, + whether empty or not. + """ + + data_table_row: "DataTableRow" = proto.Field( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=2, + message=field_mask_pb2.FieldMask, + ) + + +class ListDataTableRowsRequest(proto.Message): + r"""Request to list data table rows. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + projects/{project}/locations/{locations}/instances/{instance}/dataTables/{data_table} + page_size (int): + Optional. The maximum number of data table + rows to return. The service may return fewer + than this value. If unspecified, at most 100 + data table rows will be returned. The maximum + value is 1000; values above 1000 will be coerced + to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListDataTableRows`` call. + order_by (str): + Optional. Configures ordering of DataTables in the response. + Note: Our implementation currently supports order by + "create_time asc" only + filter (str): + Optional. Filter facilitating search over + data table rows. This filter performs a + case-insensitive substring match on the row + values. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + order_by: str = proto.Field( + proto.STRING, + number=4, + ) + filter: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDataTableRowsResponse(proto.Message): + r"""Response message for listing data table rows. + + Attributes: + data_table_rows (MutableSequence[google.cloud.chronicle_v1.types.DataTableRow]): + The list of the data table rows returned. + next_page_token (str): + Optional. A token, which can be sent as ``page_token`` to + retrieve the next page. 
If this field is omitted, there are + no subsequent pages. + """ + + @property + def raw_page(self): + return self + + data_table_rows: MutableSequence["DataTableRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class GetDataTableRowRequest(proto.Message): + r"""Request to get data table row. + + Attributes: + name (str): + Required. The resource name of the data table row i,e + row_id. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteDataTableRowRequest(proto.Message): + r"""Request to delete data table row. + + Attributes: + name (str): + Required. The resource name of the data table row i,e + row_id. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class BulkCreateDataTableRowsRequest(proto.Message): + r"""Request to create data table rows in bulk. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + requests (MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]): + Required. Data table rows to create. A + maximum of 1000 rows (for sync requests) or 2000 + rows (for async requests) can be created in a + single request. Total size of the rows should be + less than 4MB. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateDataTableRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateDataTableRowRequest", + ) + + +class BulkCreateDataTableRowsResponse(proto.Message): + r"""Response message with created data table rows. 
+ + Attributes: + data_table_rows (MutableSequence[google.cloud.chronicle_v1.types.DataTableRow]): + DataTableRows created + """ + + data_table_rows: MutableSequence["DataTableRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + + +class BulkGetDataTableRowsRequest(proto.Message): + r"""Request to get data table rows in bulk. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + requests (MutableSequence[google.cloud.chronicle_v1.types.GetDataTableRowRequest]): + Required. Data table rows to get. At max + 1,000 rows can be there in a request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["GetDataTableRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="GetDataTableRowRequest", + ) + + +class BulkGetDataTableRowsResponse(proto.Message): + r"""Response message with data table rows. + + Attributes: + data_table_rows (MutableSequence[google.cloud.chronicle_v1.types.DataTableRow]): + The requested data table rows. + """ + + data_table_rows: MutableSequence["DataTableRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + + +class BulkReplaceDataTableRowsRequest(proto.Message): + r"""Request to replace data table rows in bulk. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + requests (MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]): + Required. Data table rows to replace the + existing data table rows. A maximum of 1000 rows + (for sync requests) or 2000 rows (for async + requests) can be replaced in a single request. + Total size of the rows should be less than 4MB. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["CreateDataTableRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="CreateDataTableRowRequest", + ) + + +class BulkReplaceDataTableRowsResponse(proto.Message): + r"""Response message with data table rows that replaced existing + data table rows. + + Attributes: + data_table_rows (MutableSequence[google.cloud.chronicle_v1.types.DataTableRow]): + DataTableRows that replaced existing data + table rows + """ + + data_table_rows: MutableSequence["DataTableRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + + +class BulkUpdateDataTableRowsRequest(proto.Message): + r"""Request to update data table rows in bulk. + + Attributes: + parent (str): + Required. The resource id of the data table. Format: + /projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table} + requests (MutableSequence[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest]): + Required. Data table rows to update. At max + 1,000 rows (or rows with size less than 2MB) can + be there in a request. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + requests: MutableSequence["UpdateDataTableRowRequest"] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="UpdateDataTableRowRequest", + ) + + +class BulkUpdateDataTableRowsResponse(proto.Message): + r"""Response message with updated data table rows. + + Attributes: + data_table_rows (MutableSequence[google.cloud.chronicle_v1.types.DataTableRow]): + DataTableRows updated + """ + + data_table_rows: MutableSequence["DataTableRow"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DataTableRow", + ) + + +class DataTableScopeInfo(proto.Message): + r"""DataTableScopeInfo specifies the scope info of the data + table. + + Attributes: + data_access_scopes (MutableSequence[str]): + Required. Contains the list of scope names of the data + table. 
If the list is empty, the data table is treated as + unscoped. The scope names should be full resource names and + should be of the format: + "projects/{project}/locations/{location}/instances/{instance}/dataAccessScopes/{scope_name}". + """ + + data_access_scopes: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + + +class DataTable(proto.Message): + r"""DataTable represents the data table resource. + + Attributes: + name (str): + Identifier. The resource name of the data table Format: + "{project}/locations/{location}/instances/{instance}/dataTables/{data_table}". + display_name (str): + Output only. The unique display name of the + data table. + description (str): + Required. A user-provided description of the + data table. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Table create time + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Table update time + column_info (MutableSequence[google.cloud.chronicle_v1.types.DataTableColumnInfo]): + Immutable. Details of all the columns in the + table + data_table_uuid (str): + Output only. Data table unique id + rules (MutableSequence[str]): + Output only. The resource names for the + associated Rules that use this data table. + Format: + + projects/{project}/locations/{location}/instances/{instance}/rules/{rule}. + {rule} here refers to the rule id. + rule_associations_count (int): + Output only. The count of rules using the + data table. + row_time_to_live (str): + Optional. User-provided TTL of the data + table. + approximate_row_count (int): + Output only. The count of rows in the data + table. + scope_info (google.cloud.chronicle_v1.types.DataTableScopeInfo): + Optional. The scope info of the data table. During data + table creation, if this field is not set, the data table + without scopes (an unscoped table) will be created for a + global user. For a scoped user, this field must be set. 
+ During data table update, if scope_info is requested to be + updated, this field must be set. + update_source (google.cloud.chronicle_v1.types.DataTableUpdateSource): + Output only. Source of the data table update. + row_time_to_live_update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Last update time of the TTL of + the data table. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + display_name: str = proto.Field( + proto.STRING, + number=2, + ) + description: str = proto.Field( + proto.STRING, + number=3, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=5, + message=timestamp_pb2.Timestamp, + ) + column_info: MutableSequence["DataTableColumnInfo"] = proto.RepeatedField( + proto.MESSAGE, + number=6, + message="DataTableColumnInfo", + ) + data_table_uuid: str = proto.Field( + proto.STRING, + number=7, + ) + rules: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=8, + ) + rule_associations_count: int = proto.Field( + proto.INT32, + number=9, + ) + row_time_to_live: str = proto.Field( + proto.STRING, + number=10, + ) + approximate_row_count: int = proto.Field( + proto.INT64, + number=11, + ) + scope_info: "DataTableScopeInfo" = proto.Field( + proto.MESSAGE, + number=12, + message="DataTableScopeInfo", + ) + update_source: "DataTableUpdateSource" = proto.Field( + proto.ENUM, + number=13, + enum="DataTableUpdateSource", + ) + row_time_to_live_update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + + +class DataTableRow(proto.Message): + r"""DataTableRow represents a single row in a data table. + + Attributes: + name (str): + Identifier. 
The resource name of the data table Format: + projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row} + values (MutableSequence[str]): + Required. All column values for a single row. + The values should be in the same order as the + columns of the data tables. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DataTableRow create time + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. DataTableRow update time + row_time_to_live (str): + Optional. User-provided TTL of the data table + row. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + values: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=4, + message=timestamp_pb2.Timestamp, + ) + row_time_to_live: str = proto.Field( + proto.STRING, + number=5, + ) + + +class DataTableColumnInfo(proto.Message): + r"""DataTableColumnInfo represents the column metadata of the datatable. + The column_index represents the ordering of the values in + DataTableRow. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + mapped_column_path (str): + Entity proto field path that the column is + mapped to + + This field is a member of `oneof`_ ``path_or_type``. + column_type (google.cloud.chronicle_v1.types.DataTableColumnInfo.DataTableColumnType): + Column type can be STRING, CIDR (Ex- + 10.1.1.0/24), REGEX + + This field is a member of `oneof`_ ``path_or_type``. + column_index (int): + Required. Column Index. 
0,1,2... + original_column (str): + Required. Original column name of the Data + Table (present in the CSV header in case of + creation of data tables using file uploads). It + must satisfy the following requirements: + + - Starts with letter. + - Contains only letters, numbers and underscore. + - Must be unique and has length < 256. + key_column (bool): + Optional. Whether to include this column in the calculation + of the row ID. If no columns have key_column = true, all + columns will be included in the calculation of the row ID. + repeated_values (bool): + Optional. Whether the column is a repeated + values column. + """ + + class DataTableColumnType(proto.Enum): + r"""DataTableColumnType denotes the type of the column to be + referenced in the rule. + + Values: + DATA_TABLE_COLUMN_TYPE_UNSPECIFIED (0): + The default Data Table Column Type. + STRING (1): + Denotes the type of the column as STRING. + REGEX (2): + Denotes the type of the column as REGEX. + CIDR (3): + Denotes the type of the column as CIDR. + NUMBER (4): + Denotes the type of the column as NUMBER + (includes int and float). + """ + + DATA_TABLE_COLUMN_TYPE_UNSPECIFIED = 0 + STRING = 1 + REGEX = 2 + CIDR = 3 + NUMBER = 4 + + mapped_column_path: str = proto.Field( + proto.STRING, + number=3, + oneof="path_or_type", + ) + column_type: DataTableColumnType = proto.Field( + proto.ENUM, + number=4, + oneof="path_or_type", + enum=DataTableColumnType, + ) + column_index: int = proto.Field( + proto.INT32, + number=1, + ) + original_column: str = proto.Field( + proto.STRING, + number=2, + ) + key_column: bool = proto.Field( + proto.BOOL, + number=5, + ) + repeated_values: bool = proto.Field( + proto.BOOL, + number=6, + ) + + +class GetDataTableOperationErrorsRequest(proto.Message): + r"""The request message for GetDataTableOperationErrors. + + Attributes: + name (str): + Required. Resource name for the data table operation errors. 
+ Format: + projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors} + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DataTableOperationErrors(proto.Message): + r"""The message containing the errors for a data table operation. + + Attributes: + name (str): + Identifier. Resource name for the data table operation + errors. Format: + projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors} + rpc_errors (MutableSequence[google.rpc.status_pb2.Status]): + The list of errors. Replaces the deprecated ``errors`` + field. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + rpc_errors: MutableSequence[status_pb2.Status] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=status_pb2.Status, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_async.py new file mode 100644 index 000000000000..84606b82152c --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkCreateDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_bulk_create_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkCreateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_create_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_sync.py new file mode 100644 index 000000000000..7c531f8f9edc --- /dev/null +++ 
b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkCreateDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_bulk_create_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkCreateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_create_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_async.py new file mode 100644 index 000000000000..76eb2bfeeda2 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BulkGetDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkGetDataTableRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_bulk_get_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.GetDataTableRowRequest() + requests.name = "name_value" + + request = chronicle_v1.BulkGetDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_get_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkGetDataTableRows_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_sync.py new file mode 100644 index 000000000000..38370190e497 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 
2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkGetDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkGetDataTableRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_bulk_get_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.GetDataTableRowRequest() + requests.name = "name_value" + + request = chronicle_v1.BulkGetDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_get_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkGetDataTableRows_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_async.py new file mode 100644 index 000000000000..b77b987f6d29 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for BulkReplaceDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_bulk_replace_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkReplaceDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_replace_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_sync.py new file mode 100644 index 000000000000..bdef76d07a11 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_sync.py @@ -0,0 
+1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkReplaceDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_bulk_replace_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.CreateDataTableRowRequest() + requests.parent = "parent_value" + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkReplaceDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_replace_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_async.py new file mode 100644 index 000000000000..df0c1a8437c7 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. 
DO NOT EDIT! +# +# Snippet for BulkUpdateDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_bulk_update_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + requests = chronicle_v1.UpdateDataTableRowRequest() + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkUpdateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = await client.bulk_update_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_sync.py new file mode 100644 index 000000000000..9c83222b7fb8 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 
-*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for BulkUpdateDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_bulk_update_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + requests = chronicle_v1.UpdateDataTableRowRequest() + requests.data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.BulkUpdateDataTableRowsRequest( + parent="parent_value", + requests=requests, + ) + + # Make the request + response = client.bulk_update_data_table_rows(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_async.py new file mode 100644 index 000000000000..80bdbe49d6ea --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_CreateDataTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_create_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.CreateDataTableRequest( + parent="parent_value", + data_table=data_table, + data_table_id="data_table_id_value", + ) + + # Make the request + response = await client.create_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_CreateDataTable_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_async.py new file mode 100644 index 000000000000..a2865feea3f3 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, 
Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_CreateDataTableRow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_create_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.CreateDataTableRowRequest( + parent="parent_value", + data_table_row=data_table_row, + ) + + # Make the request + response = await client.create_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_CreateDataTableRow_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_sync.py new file mode 100644 index 000000000000..a725b3031e16 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_row_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_CreateDataTableRow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_create_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.CreateDataTableRowRequest( + parent="parent_value", + data_table_row=data_table_row, + ) + + # Make the request + response = client.create_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_CreateDataTableRow_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_sync.py new file mode 100644 index 000000000000..548389397e63 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_create_data_table_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_CreateDataTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_create_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.CreateDataTableRequest( + parent="parent_value", + data_table=data_table, + data_table_id="data_table_id_value", + ) + + # Make the request + response = client.create_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_CreateDataTable_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_async.py new file mode 100644 index 000000000000..c16ae3985ceb --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_DeleteDataTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_delete_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_table(request=request) + + +# [END chronicle_v1_generated_DataTableService_DeleteDataTable_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_async.py new file mode 100644 index 000000000000..2ef8aaec12f8 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_DeleteDataTableRow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_delete_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRowRequest( + name="name_value", + ) + + # Make the request + await client.delete_data_table_row(request=request) + + +# [END chronicle_v1_generated_DataTableService_DeleteDataTableRow_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_sync.py new file mode 100644 index 000000000000..6fd7f122e321 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_row_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_DeleteDataTableRow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_delete_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRowRequest( + name="name_value", + ) + + # Make the request + client.delete_data_table_row(request=request) + + +# [END chronicle_v1_generated_DataTableService_DeleteDataTableRow_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_sync.py new file mode 100644 index 000000000000..8f5cc5cf204e --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_delete_data_table_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_DeleteDataTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_delete_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.DeleteDataTableRequest( + name="name_value", + ) + + # Make the request + client.delete_data_table(request=request) + + +# [END chronicle_v1_generated_DataTableService_DeleteDataTable_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_async.py new file mode 100644 index 000000000000..4418161e2da3 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_get_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTable_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_async.py new file mode 100644 index 000000000000..a4272e0590c4 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTableOperationErrors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_get_data_table_operation_errors(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table_operation_errors(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_sync.py new file mode 100644 index 000000000000..28888a5e845f --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_operation_errors_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTableOperationErrors +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_get_data_table_operation_errors(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table_operation_errors(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_async.py new file mode 100644 index 000000000000..382ae941f817 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTableRow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_get_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRowRequest( + name="name_value", + ) + + # Make the request + response = await client.get_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTableRow_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_sync.py new file mode 100644 index 000000000000..ec3d0415eae9 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_row_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTableRow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_get_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRowRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTableRow_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_sync.py new file mode 100644 index 000000000000..e0b133b83933 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_get_data_table_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_GetDataTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_get_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.GetDataTableRequest( + name="name_value", + ) + + # Make the request + response = client.get_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_GetDataTable_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_async.py new file mode 100644 index 000000000000..27dd5ec836d7 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_ListDataTableRows_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_list_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTableRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_table_rows(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END chronicle_v1_generated_DataTableService_ListDataTableRows_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_sync.py new file mode 100644 index 000000000000..352910cf4aa2 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_table_rows_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataTableRows +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_ListDataTableRows_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_list_data_table_rows(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTableRowsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_table_rows(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END chronicle_v1_generated_DataTableService_ListDataTableRows_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_async.py new file mode 100644 index 000000000000..4524c33b3928 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataTables +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_ListDataTables_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_list_data_tables(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_tables(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END chronicle_v1_generated_DataTableService_ListDataTables_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_sync.py new file mode 100644 index 000000000000..cd646fc8e77e --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_list_data_tables_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDataTables +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_ListDataTables_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_list_data_tables(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + request = chronicle_v1.ListDataTablesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_data_tables(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END chronicle_v1_generated_DataTableService_ListDataTables_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_async.py new file mode 100644 index 000000000000..b5dc71670ce7 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_UpdateDataTable_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_update_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.UpdateDataTableRequest( + data_table=data_table, + ) + + # Make the request + response = await client.update_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_UpdateDataTable_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_async.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_async.py new file mode 100644 index 000000000000..6dd831735872 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_UpdateDataTableRow_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +async def sample_update_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceAsyncClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.UpdateDataTableRowRequest( + data_table_row=data_table_row, + ) + + # Make the request + response = await client.update_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_UpdateDataTableRow_async] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_sync.py new file mode 100644 index 000000000000..f57826df0a80 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_row_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDataTableRow +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_UpdateDataTableRow_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_update_data_table_row(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table_row = chronicle_v1.DataTableRow() + data_table_row.values = ["values_value1", "values_value2"] + + request = chronicle_v1.UpdateDataTableRowRequest( + data_table_row=data_table_row, + ) + + # Make the request + response = client.update_data_table_row(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_UpdateDataTableRow_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_sync.py b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_sync.py new file mode 100644 index 000000000000..388e52bd07c3 --- /dev/null +++ b/packages/google-cloud-chronicle/samples/generated_samples/chronicle_v1_generated_data_table_service_update_data_table_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for UpdateDataTable +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-chronicle + + +# [START chronicle_v1_generated_DataTableService_UpdateDataTable_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import chronicle_v1 + + +def sample_update_data_table(): + # Create a client + client = chronicle_v1.DataTableServiceClient() + + # Initialize request argument(s) + data_table = chronicle_v1.DataTable() + data_table.description = "description_value" + + request = chronicle_v1.UpdateDataTableRequest( + data_table=data_table, + ) + + # Make the request + response = client.update_data_table(request=request) + + # Handle the response + print(response) + + +# [END chronicle_v1_generated_DataTableService_UpdateDataTable_sync] diff --git a/packages/google-cloud-chronicle/samples/generated_samples/snippet_metadata_google.cloud.chronicle.v1.json b/packages/google-cloud-chronicle/samples/generated_samples/snippet_metadata_google.cloud.chronicle.v1.json index cda1af98b51f..3a542c89ca78 100644 --- a/packages/google-cloud-chronicle/samples/generated_samples/snippet_metadata_google.cloud.chronicle.v1.json +++ b/packages/google-cloud-chronicle/samples/generated_samples/snippet_metadata_google.cloud.chronicle.v1.json @@ -1657,6 +1657,2489 @@ ], "title": "chronicle_v1_generated_data_access_control_service_update_data_access_scope_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, 
+ "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.bulk_create_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkCreateDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkCreateDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkCreateDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkCreateDataTableRowsResponse", + "shortName": "bulk_create_data_table_rows" + }, + "description": "Sample for BulkCreateDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.bulk_create_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkCreateDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkCreateDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkCreateDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkCreateDataTableRowsResponse", + "shortName": "bulk_create_data_table_rows" + }, + "description": "Sample for BulkCreateDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkCreateDataTableRows_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_create_data_table_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.bulk_get_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkGetDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkGetDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkGetDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.GetDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkGetDataTableRowsResponse", + "shortName": "bulk_get_data_table_rows" + }, + "description": "Sample for BulkGetDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkGetDataTableRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": 
"DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.bulk_get_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkGetDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkGetDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkGetDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.GetDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkGetDataTableRowsResponse", + "shortName": "bulk_get_data_table_rows" + }, + "description": "Sample for BulkGetDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkGetDataTableRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_get_data_table_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceAsyncClient.bulk_replace_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkReplaceDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkReplaceDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsResponse", + "shortName": "bulk_replace_data_table_rows" + }, + "description": "Sample for BulkReplaceDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceClient.bulk_replace_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkReplaceDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkReplaceDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.CreateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkReplaceDataTableRowsResponse", + "shortName": "bulk_replace_data_table_rows" + }, + "description": "Sample for BulkReplaceDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkReplaceDataTableRows_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_replace_data_table_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceAsyncClient.bulk_update_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkUpdateDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkUpdateDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsResponse", + "shortName": "bulk_update_data_table_rows" + }, + "description": "Sample for BulkUpdateDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceClient.bulk_update_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.BulkUpdateDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "BulkUpdateDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "requests", + "type": "MutableSequence[google.cloud.chronicle_v1.types.UpdateDataTableRowRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.BulkUpdateDataTableRowsResponse", + "shortName": "bulk_update_data_table_rows" + }, + "description": "Sample for BulkUpdateDataTableRows", + "file": "chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_BulkUpdateDataTableRows_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_bulk_update_data_table_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceAsyncClient.create_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.CreateDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "CreateDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.CreateDataTableRowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_table_row", + "type": "google.cloud.chronicle_v1.types.DataTableRow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "create_data_table_row" + }, + "description": "Sample for CreateDataTableRow", + "file": "chronicle_v1_generated_data_table_service_create_data_table_row_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_CreateDataTableRow_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_create_data_table_row_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.create_data_table_row", + "method": { + "fullName": 
"google.cloud.chronicle.v1.DataTableService.CreateDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "CreateDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.CreateDataTableRowRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_table_row", + "type": "google.cloud.chronicle_v1.types.DataTableRow" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "create_data_table_row" + }, + "description": "Sample for CreateDataTableRow", + "file": "chronicle_v1_generated_data_table_service_create_data_table_row_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_CreateDataTableRow_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_create_data_table_row_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.create_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.CreateDataTable", + "service": { + "fullName": 
"google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "CreateDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.CreateDataTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_table", + "type": "google.cloud.chronicle_v1.types.DataTable" + }, + { + "name": "data_table_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "create_data_table" + }, + "description": "Sample for CreateDataTable", + "file": "chronicle_v1_generated_data_table_service_create_data_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_CreateDataTable_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_create_data_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.create_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.CreateDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "CreateDataTable" + }, + "parameters": [ + { 
+ "name": "request", + "type": "google.cloud.chronicle_v1.types.CreateDataTableRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "data_table", + "type": "google.cloud.chronicle_v1.types.DataTable" + }, + { + "name": "data_table_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "create_data_table" + }, + "description": "Sample for CreateDataTable", + "file": "chronicle_v1_generated_data_table_service_create_data_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_CreateDataTable_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 50, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 51, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_create_data_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.delete_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.DeleteDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "DeleteDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.DeleteDataTableRowRequest" + }, + { + 
"name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_table_row" + }, + "description": "Sample for DeleteDataTableRow", + "file": "chronicle_v1_generated_data_table_service_delete_data_table_row_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_DeleteDataTableRow_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_delete_data_table_row_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.delete_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.DeleteDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "DeleteDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.DeleteDataTableRowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_table_row" + }, + "description": "Sample for DeleteDataTableRow", + "file": 
"chronicle_v1_generated_data_table_service_delete_data_table_row_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_DeleteDataTableRow_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_delete_data_table_row_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.delete_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.DeleteDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "DeleteDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.DeleteDataTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_table" + }, + "description": "Sample for DeleteDataTable", + "file": "chronicle_v1_generated_data_table_service_delete_data_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_DeleteDataTable_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 
49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_delete_data_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.delete_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.DeleteDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "DeleteDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.DeleteDataTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "force", + "type": "bool" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_data_table" + }, + "description": "Sample for DeleteDataTable", + "file": "chronicle_v1_generated_data_table_service_delete_data_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_DeleteDataTable_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": 
"chronicle_v1_generated_data_table_service_delete_data_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.get_data_table_operation_errors", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTableOperationErrors", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "GetDataTableOperationErrors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableOperationErrorsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableOperationErrors", + "shortName": "get_data_table_operation_errors" + }, + "description": "Sample for GetDataTableOperationErrors", + "file": "chronicle_v1_generated_data_table_service_get_data_table_operation_errors_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_operation_errors_async.py" + }, + { + "canonical": true, + "clientMethod": { + 
"client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.get_data_table_operation_errors", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTableOperationErrors", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "GetDataTableOperationErrors" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableOperationErrorsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableOperationErrors", + "shortName": "get_data_table_operation_errors" + }, + "description": "Sample for GetDataTableOperationErrors", + "file": "chronicle_v1_generated_data_table_service_get_data_table_operation_errors_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTableOperationErrors_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_operation_errors_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + 
"fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.get_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "GetDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableRowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "get_data_table_row" + }, + "description": "Sample for GetDataTableRow", + "file": "chronicle_v1_generated_data_table_service_get_data_table_row_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTableRow_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_row_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.get_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": 
"DataTableService" + }, + "shortName": "GetDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableRowRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "get_data_table_row" + }, + "description": "Sample for GetDataTableRow", + "file": "chronicle_v1_generated_data_table_service_get_data_table_row_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTableRow_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_row_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.get_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "GetDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "get_data_table" + }, + "description": "Sample for GetDataTable", + "file": "chronicle_v1_generated_data_table_service_get_data_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTable_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.get_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.GetDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "GetDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.GetDataTableRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "get_data_table" + }, + "description": "Sample for GetDataTable", 
+ "file": "chronicle_v1_generated_data_table_service_get_data_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_GetDataTable_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_get_data_table_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.list_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.ListDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "ListDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.ListDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTableRowsAsyncPager", + "shortName": "list_data_table_rows" + }, + "description": "Sample for ListDataTableRows", + "file": "chronicle_v1_generated_data_table_service_list_data_table_rows_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"chronicle_v1_generated_DataTableService_ListDataTableRows_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_list_data_table_rows_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.list_data_table_rows", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.ListDataTableRows", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "ListDataTableRows" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.ListDataTableRowsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTableRowsPager", + "shortName": "list_data_table_rows" + }, + "description": "Sample for ListDataTableRows", + "file": "chronicle_v1_generated_data_table_service_list_data_table_rows_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_ListDataTableRows_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_list_data_table_rows_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.list_data_tables", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.ListDataTables", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "ListDataTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.ListDataTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTablesAsyncPager", + "shortName": "list_data_tables" + }, + "description": "Sample for ListDataTables", + "file": "chronicle_v1_generated_data_table_service_list_data_tables_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_ListDataTables_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, 
+ "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_list_data_tables_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.list_data_tables", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.ListDataTables", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "ListDataTables" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.ListDataTablesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.services.data_table_service.pagers.ListDataTablesPager", + "shortName": "list_data_tables" + }, + "description": "Sample for ListDataTables", + "file": "chronicle_v1_generated_data_table_service_list_data_tables_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_ListDataTables_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_list_data_tables_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": 
"google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.update_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.UpdateDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "UpdateDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.UpdateDataTableRowRequest" + }, + { + "name": "data_table_row", + "type": "google.cloud.chronicle_v1.types.DataTableRow" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "update_data_table_row" + }, + "description": "Sample for UpdateDataTableRow", + "file": "chronicle_v1_generated_data_table_service_update_data_table_row_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_UpdateDataTableRow_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_update_data_table_row_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + 
"fullName": "google.cloud.chronicle_v1.DataTableServiceClient.update_data_table_row", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.UpdateDataTableRow", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "UpdateDataTableRow" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.UpdateDataTableRowRequest" + }, + { + "name": "data_table_row", + "type": "google.cloud.chronicle_v1.types.DataTableRow" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTableRow", + "shortName": "update_data_table_row" + }, + "description": "Sample for UpdateDataTableRow", + "file": "chronicle_v1_generated_data_table_service_update_data_table_row_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_UpdateDataTableRow_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_update_data_table_row_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient", + "shortName": "DataTableServiceAsyncClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceAsyncClient.update_data_table", + 
"method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.UpdateDataTable", + "service": { + "fullName": "google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "UpdateDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.UpdateDataTableRequest" + }, + { + "name": "data_table", + "type": "google.cloud.chronicle_v1.types.DataTable" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "update_data_table" + }, + "description": "Sample for UpdateDataTable", + "file": "chronicle_v1_generated_data_table_service_update_data_table_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_UpdateDataTable_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_update_data_table_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient", + "shortName": "DataTableServiceClient" + }, + "fullName": "google.cloud.chronicle_v1.DataTableServiceClient.update_data_table", + "method": { + "fullName": "google.cloud.chronicle.v1.DataTableService.UpdateDataTable", + "service": { + "fullName": 
"google.cloud.chronicle.v1.DataTableService", + "shortName": "DataTableService" + }, + "shortName": "UpdateDataTable" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.chronicle_v1.types.UpdateDataTableRequest" + }, + { + "name": "data_table", + "type": "google.cloud.chronicle_v1.types.DataTable" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.chronicle_v1.types.DataTable", + "shortName": "update_data_table" + }, + "description": "Sample for UpdateDataTable", + "file": "chronicle_v1_generated_data_table_service_update_data_table_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "chronicle_v1_generated_DataTableService_UpdateDataTable_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 48, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 49, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "chronicle_v1_generated_data_table_service_update_data_table_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-chronicle/tests/unit/gapic/chronicle_v1/test_data_table_service.py b/packages/google-cloud-chronicle/tests/unit/gapic/chronicle_v1/test_data_table_service.py new file mode 100644 index 000000000000..d7093a56d2f2 --- /dev/null +++ b/packages/google-cloud-chronicle/tests/unit/gapic/chronicle_v1/test_data_table_service.py @@ -0,0 +1,15517 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import re + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.auth +import google.protobuf.field_mask_pb2 as field_mask_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.rpc.status_pb2 as status_pb2 # type: ignore +from google.api_core import ( + client_options, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 
import service_account + +from google.cloud.chronicle_v1.services.data_table_service import ( + DataTableServiceAsyncClient, + DataTableServiceClient, + pagers, + transports, +) +from google.cloud.chronicle_v1.types import data_table +from google.cloud.chronicle_v1.types import data_table as gcc_data_table + +CRED_INFO_JSON = { + "credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert DataTableServiceClient._get_default_mtls_endpoint(None) is None + assert ( + DataTableServiceClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTableServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + DataTableServiceClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTableServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + DataTableServiceClient._get_default_mtls_endpoint(non_googleapi) + == non_googleapi + ) + assert ( + DataTableServiceClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert DataTableServiceClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataTableServiceClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert DataTableServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + DataTableServiceClient._read_environment_variables() 
+ assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert DataTableServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert DataTableServiceClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert DataTableServiceClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert DataTableServiceClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + DataTableServiceClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert DataTableServiceClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert DataTableServiceClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert DataTableServiceClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert DataTableServiceClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert DataTableServiceClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + DataTableServiceClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert DataTableServiceClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert DataTableServiceClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert DataTableServiceClient._get_client_cert_source(None, False) is None + assert ( + DataTableServiceClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + DataTableServiceClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + DataTableServiceClient._get_client_cert_source(None, True) + is 
mock_default_cert_source + ) + assert ( + DataTableServiceClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + DataTableServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceClient), +) +@mock.patch.object( + DataTableServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = DataTableServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + DataTableServiceClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + DataTableServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == DataTableServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataTableServiceClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + DataTableServiceClient._get_api_endpoint(None, None, default_universe, "always") + == DataTableServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataTableServiceClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == DataTableServiceClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + DataTableServiceClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + DataTableServiceClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + DataTableServiceClient._get_api_endpoint( + None, 
mock_client_cert_source, mock_universe, "auto" + ) + assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + DataTableServiceClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + DataTableServiceClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + DataTableServiceClient._get_universe_domain(None, None) + == DataTableServiceClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + DataTableServiceClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = DataTableServiceClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = DataTableServiceClient(credentials=cred) + client._transport._credentials = cred + + error = 
core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (DataTableServiceClient, "grpc"), + (DataTableServiceAsyncClient, "grpc_asyncio"), + (DataTableServiceClient, "rest"), + ], +) +def test_data_table_service_client_from_service_account_info( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "chronicle.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://chronicle.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.DataTableServiceGrpcTransport, "grpc"), + (transports.DataTableServiceGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.DataTableServiceRestTransport, "rest"), + ], +) +def test_data_table_service_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ 
+ (DataTableServiceClient, "grpc"), + (DataTableServiceAsyncClient, "grpc_asyncio"), + (DataTableServiceClient, "rest"), + ], +) +def test_data_table_service_client_from_service_account_file( + client_class, transport_name +): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "chronicle.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://chronicle.googleapis.com" + ) + + +def test_data_table_service_client_get_transport_class(): + transport = DataTableServiceClient.get_transport_class() + available_transports = [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceRestTransport, + ] + assert transport in available_transports + + transport = DataTableServiceClient.get_transport_class("grpc") + assert transport == transports.DataTableServiceGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataTableServiceClient, transports.DataTableServiceGrpcTransport, "grpc"), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataTableServiceClient, transports.DataTableServiceRestTransport, "rest"), + ], +) +@mock.patch.object( + DataTableServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceClient), +) +@mock.patch.object( + DataTableServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + 
modify_default_endpoint_template(DataTableServiceAsyncClient), +) +def test_data_table_service_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(DataTableServiceClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(DataTableServiceClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + ( + DataTableServiceClient, + transports.DataTableServiceGrpcTransport, + "grpc", + "true", + ), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + ( + 
DataTableServiceClient, + transports.DataTableServiceGrpcTransport, + "grpc", + "false", + ), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + ( + DataTableServiceClient, + transports.DataTableServiceRestTransport, + "rest", + "true", + ), + ( + DataTableServiceClient, + transports.DataTableServiceRestTransport, + "rest", + "false", + ), + ], +) +@mock.patch.object( + DataTableServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceClient), +) +@mock.patch.object( + DataTableServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_data_table_service_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [DataTableServiceClient, DataTableServiceAsyncClient] +) +@mock.patch.object( + DataTableServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTableServiceClient), +) +@mock.patch.object( + DataTableServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(DataTableServiceAsyncClient), +) +def test_data_table_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize( + "client_class", [DataTableServiceClient, DataTableServiceAsyncClient] +) +@mock.patch.object( + DataTableServiceClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceClient), +) +@mock.patch.object( + DataTableServiceAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(DataTableServiceAsyncClient), +) +def test_data_table_service_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = DataTableServiceClient._DEFAULT_UNIVERSE + default_endpoint = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = DataTableServiceClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (DataTableServiceClient, transports.DataTableServiceGrpcTransport, "grpc"), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (DataTableServiceClient, transports.DataTableServiceRestTransport, "rest"), + ], +) +def test_data_table_service_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataTableServiceClient, + transports.DataTableServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ( + DataTableServiceClient, + transports.DataTableServiceRestTransport, + "rest", + None, + ), + ], +) +def test_data_table_service_client_client_options_credentials_file( + client_class, 
transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_data_table_service_client_client_options_from_dict(): + with mock.patch( + "google.cloud.chronicle_v1.services.data_table_service.transports.DataTableServiceGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = DataTableServiceClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + DataTableServiceClient, + transports.DataTableServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + DataTableServiceAsyncClient, + transports.DataTableServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_data_table_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "chronicle.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=None, + default_host="chronicle.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcc_data_table.CreateDataTableRequest, + dict, + ], +) +def test_create_data_table(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + 
transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + response = client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcc_data_table.CreateDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +def test_create_data_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcc_data_table.CreateDataTableRequest( + parent="parent_value", + data_table_id="data_table_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.create_data_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcc_data_table.CreateDataTableRequest( + parent="parent_value", + data_table_id="data_table_id_value", + ) + + +def test_create_data_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_data_table] = ( + mock_rpc + ) + request = {} + client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_table + ] = mock_rpc + + request = {} + await client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_table_async( + transport: str = "grpc_asyncio", request_type=gcc_data_table.CreateDataTableRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + ) + response = await client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcc_data_table.CreateDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +@pytest.mark.asyncio +async def test_create_data_table_async_from_dict(): + await test_create_data_table_async(request_type=dict) + + +def test_create_data_table_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcc_data_table.CreateDataTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + call.return_value = gcc_data_table.DataTable() + client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_data_table_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcc_data_table.CreateDataTableRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable() + ) + await client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_data_table_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = gcc_data_table.DataTable() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_table( + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_table + mock_val = gcc_data_table.DataTable(name="name_value") + assert arg == mock_val + arg = args[0].data_table_id + mock_val = "data_table_id_value" + assert arg == mock_val + + +def test_create_data_table_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_table( + gcc_data_table.CreateDataTableRequest(), + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_data_table_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcc_data_table.DataTable() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_data_table( + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_table + mock_val = gcc_data_table.DataTable(name="name_value") + assert arg == mock_val + arg = args[0].data_table_id + mock_val = "data_table_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_data_table_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_table( + gcc_data_table.CreateDataTableRequest(), + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.ListDataTablesRequest, + dict, + ], +) +def test_list_data_tables(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_table.ListDataTablesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.ListDataTablesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTablesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_tables_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.ListDataTablesRequest( + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_data_tables(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.ListDataTablesRequest( + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + ) + + +def test_list_data_tables_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_tables in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_data_tables] = ( + mock_rpc + ) + request = {} + client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_tables_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_tables + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_tables + ] = mock_rpc + + request = {} + await client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_tables_async( + transport: str = "grpc_asyncio", request_type=data_table.ListDataTablesRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTablesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.ListDataTablesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTablesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_tables_async_from_dict(): + await test_list_data_tables_async(request_type=dict) + + +def test_list_data_tables_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.ListDataTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + call.return_value = data_table.ListDataTablesResponse() + client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_tables_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.ListDataTablesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTablesResponse() + ) + await client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_tables_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.ListDataTablesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_tables( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_tables_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_tables( + data_table.ListDataTablesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_tables_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.ListDataTablesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTablesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_tables( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_tables_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_data_tables( + data_table.ListDataTablesRequest(), + parent="parent_value", + ) + + +def test_list_data_tables_pager(transport_name: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Set the response to a series of pages. + call.side_effect = ( + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + data_table.DataTable(), + ], + next_page_token="abc", + ), + data_table.ListDataTablesResponse( + data_tables=[], + next_page_token="def", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + ], + next_page_token="ghi", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_tables(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_table.DataTable) for i in results) + + +def test_list_data_tables_pages(transport_name: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + data_table.DataTable(), + ], + next_page_token="abc", + ), + data_table.ListDataTablesResponse( + data_tables=[], + next_page_token="def", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + ], + next_page_token="ghi", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_tables(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_tables_async_pager(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + data_table.DataTable(), + ], + next_page_token="abc", + ), + data_table.ListDataTablesResponse( + data_tables=[], + next_page_token="def", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + ], + next_page_token="ghi", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_tables( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_table.DataTable) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_tables_async_pages(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_tables), "__call__", new_callable=mock.AsyncMock + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + data_table.DataTable(), + ], + next_page_token="abc", + ), + data_table.ListDataTablesResponse( + data_tables=[], + next_page_token="def", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + ], + next_page_token="ghi", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_tables(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableRequest, + dict, + ], +) +def test_get_data_table(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=data_table.DataTableUpdateSource.USER, + ) + response = client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == data_table.DataTableUpdateSource.USER + + +def test_get_data_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.GetDataTableRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.get_data_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.GetDataTableRequest( + name="name_value", + ) + + +def test_get_data_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_table] = mock_rpc + request = {} + client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_table + ] = mock_rpc + + request = {} + await client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_async( + transport: str = "grpc_asyncio", request_type=data_table.GetDataTableRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=data_table.DataTableUpdateSource.USER, + ) + ) + response = await client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == data_table.DataTableUpdateSource.USER + + +@pytest.mark.asyncio +async def test_get_data_table_async_from_dict(): + await test_get_data_table_async(request_type=dict) + + +def test_get_data_table_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + call.return_value = data_table.DataTable() + client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_table_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTable() + ) + await client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_table_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTable() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_data_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_table_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_table( + data_table.GetDataTableRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_table_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTable() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTable() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_table( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_table_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_table( + data_table.GetDataTableRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + gcc_data_table.UpdateDataTableRequest, + dict, + ], +) +def test_update_data_table(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + response = client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = gcc_data_table.UpdateDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +def test_update_data_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = gcc_data_table.UpdateDataTableRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_data_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == gcc_data_table.UpdateDataTableRequest() + + +def test_update_data_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_data_table] = ( + mock_rpc + ) + request = {} + client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_table + ] = mock_rpc + + request = {} + await client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_table_async( + transport: str = "grpc_asyncio", request_type=gcc_data_table.UpdateDataTableRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + ) + response = await client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = gcc_data_table.UpdateDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +@pytest.mark.asyncio +async def test_update_data_table_async_from_dict(): + await test_update_data_table_async(request_type=dict) + + +def test_update_data_table_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = gcc_data_table.UpdateDataTableRequest() + + request.data_table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + call.return_value = gcc_data_table.DataTable() + client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_table.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_table_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = gcc_data_table.UpdateDataTableRequest() + + request.data_table.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable() + ) + await client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_table.name=name_value", + ) in kw["metadata"] + + +def test_update_data_table_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcc_data_table.DataTable() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_table( + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_table + mock_val = gcc_data_table.DataTable(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_table_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_table( + gcc_data_table.UpdateDataTableRequest(), + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_table_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = gcc_data_table.DataTable() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.update_data_table( + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_table + mock_val = gcc_data_table.DataTable(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_table_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_table( + gcc_data_table.UpdateDataTableRequest(), + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.DeleteDataTableRequest, + dict, + ], +) +def test_delete_data_table(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.DeleteDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_data_table_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.DeleteDataTableRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_data_table(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.DeleteDataTableRequest( + name="name_value", + ) + + +def test_delete_data_table_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_data_table] = ( + mock_rpc + ) + request = {} + client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_table_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_table + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_table + ] = mock_rpc + + request = {} + await client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_table_async( + transport: str = "grpc_asyncio", request_type=data_table.DeleteDataTableRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.DeleteDataTableRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_data_table_async_from_dict(): + await test_delete_data_table_async(request_type=dict) + + +def test_delete_data_table_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.DeleteDataTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + call.return_value = None + client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_data_table_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_table.DeleteDataTableRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_data_table_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_table( + name="name_value", + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +def test_delete_data_table_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_table( + data_table.DeleteDataTableRequest(), + name="name_value", + force=True, + ) + + +@pytest.mark.asyncio +async def test_delete_data_table_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_table( + name="name_value", + force=True, + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].force + mock_val = True + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_table_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_table( + data_table.DeleteDataTableRequest(), + name="name_value", + force=True, + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.CreateDataTableRowRequest, + dict, + ], +) +def test_create_data_table_row(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + response = client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.CreateDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +def test_create_data_table_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.CreateDataTableRowRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_data_table_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.CreateDataTableRowRequest( + parent="parent_value", + ) + + +def test_create_data_table_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_data_table_row] = ( + mock_rpc + ) + request = {} + client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_table_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_data_table_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_data_table_row + ] = mock_rpc + + request = {} + await client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.create_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_data_table_row_async( + transport: str = "grpc_asyncio", request_type=data_table.CreateDataTableRowRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + response = await client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.CreateDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.asyncio +async def test_create_data_table_row_async_from_dict(): + await test_create_data_table_row_async(request_type=dict) + + +def test_create_data_table_row_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.CreateDataTableRowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_data_table_row_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.CreateDataTableRowRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + await client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_data_table_row_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_data_table_row( + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_table_row + mock_val = data_table.DataTableRow(name="name_value") + assert arg == mock_val + + +def test_create_data_table_row_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_data_table_row( + data_table.CreateDataTableRowRequest(), + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + + +@pytest.mark.asyncio +async def test_create_data_table_row_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_data_table_row( + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].data_table_row + mock_val = data_table.DataTableRow(name="name_value") + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_data_table_row_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_data_table_row( + data_table.CreateDataTableRowRequest(), + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.UpdateDataTableRowRequest, + dict, + ], +) +def test_update_data_table_row(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + response = client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.UpdateDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +def test_update_data_table_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.UpdateDataTableRowRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.update_data_table_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.UpdateDataTableRowRequest() + + +def test_update_data_table_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_data_table_row] = ( + mock_rpc + ) + request = {} + client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_table_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_data_table_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_data_table_row + ] = mock_rpc + + request = {} + await client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.update_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_data_table_row_async( + transport: str = "grpc_asyncio", request_type=data_table.UpdateDataTableRowRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + response = await client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.UpdateDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.asyncio +async def test_update_data_table_row_async_from_dict(): + await test_update_data_table_row_async(request_type=dict) + + +def test_update_data_table_row_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.UpdateDataTableRowRequest() + + request.data_table_row.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_table_row.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_data_table_row_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.UpdateDataTableRowRequest() + + request.data_table_row.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + await client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "data_table_row.name=name_value", + ) in kw["metadata"] + + +def test_update_data_table_row_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_data_table_row( + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].data_table_row + mock_val = data_table.DataTableRow(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +def test_update_data_table_row_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.update_data_table_row( + data_table.UpdateDataTableRowRequest(), + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.asyncio +async def test_update_data_table_row_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_data_table_row( + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].data_table_row + mock_val = data_table.DataTableRow(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_data_table_row_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_data_table_row( + data_table.UpdateDataTableRowRequest(), + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.ListDataTableRowsRequest, + dict, + ], +) +def test_list_data_table_rows(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.ListDataTableRowsResponse( + next_page_token="next_page_token_value", + ) + response = client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.ListDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListDataTableRowsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_data_table_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.ListDataTableRowsRequest( + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_data_table_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.ListDataTableRowsRequest( + parent="parent_value", + page_token="page_token_value", + order_by="order_by_value", + filter="filter_value", + ) + + +def test_list_data_table_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_table_rows in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_data_table_rows] = ( + mock_rpc + ) + request = {} + client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_table_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_data_table_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_data_table_rows + ] = mock_rpc + + request = {} + await client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_data_table_rows_async( + transport: str = "grpc_asyncio", request_type=data_table.ListDataTableRowsRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTableRowsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.ListDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTableRowsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_data_table_rows_async_from_dict(): + await test_list_data_table_rows_async(request_type=dict) + + +def test_list_data_table_rows_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.ListDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.ListDataTableRowsResponse() + client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_data_table_rows_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.ListDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTableRowsResponse() + ) + await client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_data_table_rows_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.ListDataTableRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_data_table_rows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_data_table_rows_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_table_rows( + data_table.ListDataTableRowsRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_data_table_rows_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.ListDataTableRowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTableRowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_data_table_rows( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_data_table_rows_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_data_table_rows( + data_table.ListDataTableRowsRequest(), + parent="parent_value", + ) + + +def test_list_data_table_rows_pager(transport_name: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + next_page_token="abc", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[], + next_page_token="def", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + ], + next_page_token="ghi", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_data_table_rows(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_table.DataTableRow) for i in results) + + +def test_list_data_table_rows_pages(transport_name: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + next_page_token="abc", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[], + next_page_token="def", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + ], + next_page_token="ghi", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + ), + RuntimeError, + ) + pages = list(client.list_data_table_rows(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_data_table_rows_async_pager(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + next_page_token="abc", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[], + next_page_token="def", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + ], + next_page_token="ghi", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_data_table_rows( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, data_table.DataTableRow) for i in responses) + + +@pytest.mark.asyncio +async def test_list_data_table_rows_async_pages(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + next_page_token="abc", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[], + next_page_token="def", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + ], + next_page_token="ghi", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_data_table_rows(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableRowRequest, + dict, + ], +) +def test_get_data_table_row(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + response = client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +def test_get_data_table_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.GetDataTableRowRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_data_table_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.GetDataTableRowRequest( + name="name_value", + ) + + +def test_get_data_table_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_table_row in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_table_row] = ( + mock_rpc + ) + request = {} + client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_table_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_table_row + ] = mock_rpc + + request = {} + await client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_row_async( + transport: str = "grpc_asyncio", request_type=data_table.GetDataTableRowRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + response = await client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.asyncio +async def test_get_data_table_row_async_from_dict(): + await test_get_data_table_row_async(request_type=dict) + + +def test_get_data_table_row_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_table_row_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + await client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_table_row_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_table_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_table_row_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_table_row( + data_table.GetDataTableRowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_table_row_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableRow() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_table_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_table_row_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_table_row( + data_table.GetDataTableRowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.DeleteDataTableRowRequest, + dict, + ], +) +def test_delete_data_table_row(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.DeleteDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_data_table_row_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.DeleteDataTableRowRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_data_table_row(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.DeleteDataTableRowRequest( + name="name_value", + ) + + +def test_delete_data_table_row_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_data_table_row] = ( + mock_rpc + ) + request = {} + client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_table_row_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_data_table_row + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_data_table_row + ] = mock_rpc + + request = {} + await client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.delete_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_data_table_row_async( + transport: str = "grpc_asyncio", request_type=data_table.DeleteDataTableRowRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.DeleteDataTableRowRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_data_table_row_async_from_dict(): + await test_delete_data_table_row_async(request_type=dict) + + +def test_delete_data_table_row_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.DeleteDataTableRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + call.return_value = None + client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_data_table_row_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = data_table.DeleteDataTableRowRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_data_table_row_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_data_table_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_data_table_row_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_table_row( + data_table.DeleteDataTableRowRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_data_table_row_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_data_table_row( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_data_table_row_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_data_table_row( + data_table.DeleteDataTableRowRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkCreateDataTableRowsRequest, + dict, + ], +) +def test_bulk_create_data_table_rows(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkCreateDataTableRowsResponse() + response = client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.BulkCreateDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkCreateDataTableRowsResponse) + + +def test_bulk_create_data_table_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.BulkCreateDataTableRowsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.bulk_create_data_table_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.BulkCreateDataTableRowsRequest( + parent="parent_value", + ) + + +def test_bulk_create_data_table_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_create_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_create_data_table_rows + ] = mock_rpc + request = {} + client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_create_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.bulk_create_data_table_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.bulk_create_data_table_rows + ] = mock_rpc + + request = {} + await client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.bulk_create_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_async( + transport: str = "grpc_asyncio", + request_type=data_table.BulkCreateDataTableRowsRequest, +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkCreateDataTableRowsResponse() + ) + response = await client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.BulkCreateDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkCreateDataTableRowsResponse) + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_async_from_dict(): + await test_bulk_create_data_table_rows_async(request_type=dict) + + +def test_bulk_create_data_table_rows_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkCreateDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkCreateDataTableRowsResponse() + client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkCreateDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkCreateDataTableRowsResponse() + ) + await client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_bulk_create_data_table_rows_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkCreateDataTableRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.bulk_create_data_table_rows( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.CreateDataTableRowRequest(parent="parent_value")] + assert arg == mock_val + + +def test_bulk_create_data_table_rows_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_create_data_table_rows( + data_table.BulkCreateDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkCreateDataTableRowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkCreateDataTableRowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.bulk_create_data_table_rows( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.CreateDataTableRowRequest(parent="parent_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.bulk_create_data_table_rows( + data_table.BulkCreateDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkGetDataTableRowsRequest, + dict, + ], +) +def test_bulk_get_data_table_rows(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkGetDataTableRowsResponse() + response = client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.BulkGetDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.BulkGetDataTableRowsResponse) + + +def test_bulk_get_data_table_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.BulkGetDataTableRowsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.bulk_get_data_table_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.BulkGetDataTableRowsRequest( + parent="parent_value", + ) + + +def test_bulk_get_data_table_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_get_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.bulk_get_data_table_rows + ] = mock_rpc + request = {} + client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.bulk_get_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.bulk_get_data_table_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.bulk_get_data_table_rows + ] = mock_rpc + + request = {} + await client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.bulk_get_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_async( + transport: str = "grpc_asyncio", request_type=data_table.BulkGetDataTableRowsRequest +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkGetDataTableRowsResponse() + ) + response = await client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.BulkGetDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkGetDataTableRowsResponse) + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_async_from_dict(): + await test_bulk_get_data_table_rows_async(request_type=dict) + + +def test_bulk_get_data_table_rows_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkGetDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkGetDataTableRowsResponse() + client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkGetDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkGetDataTableRowsResponse() + ) + await client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_bulk_get_data_table_rows_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_table.BulkGetDataTableRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.bulk_get_data_table_rows( + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.GetDataTableRowRequest(name="name_value")] + assert arg == mock_val + + +def test_bulk_get_data_table_rows_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_get_data_table_rows( + data_table.BulkGetDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkGetDataTableRowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkGetDataTableRowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.bulk_get_data_table_rows( + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.GetDataTableRowRequest(name="name_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.bulk_get_data_table_rows( + data_table.BulkGetDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkReplaceDataTableRowsRequest, + dict, + ], +) +def test_bulk_replace_data_table_rows(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkReplaceDataTableRowsResponse() + response = client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.BulkReplaceDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkReplaceDataTableRowsResponse) + + +def test_bulk_replace_data_table_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.BulkReplaceDataTableRowsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.bulk_replace_data_table_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.BulkReplaceDataTableRowsRequest( + parent="parent_value", + ) + + +def test_bulk_replace_data_table_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_replace_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_replace_data_table_rows + ] = mock_rpc + request = {} + client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_replace_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.bulk_replace_data_table_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.bulk_replace_data_table_rows + ] = mock_rpc + + request = {} + await client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.bulk_replace_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_async( + transport: str = "grpc_asyncio", + request_type=data_table.BulkReplaceDataTableRowsRequest, +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkReplaceDataTableRowsResponse() + ) + response = await client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.BulkReplaceDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkReplaceDataTableRowsResponse) + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_async_from_dict(): + await test_bulk_replace_data_table_rows_async(request_type=dict) + + +def test_bulk_replace_data_table_rows_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkReplaceDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkReplaceDataTableRowsResponse() + client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkReplaceDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkReplaceDataTableRowsResponse() + ) + await client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_bulk_replace_data_table_rows_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkReplaceDataTableRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.bulk_replace_data_table_rows( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.CreateDataTableRowRequest(parent="parent_value")] + assert arg == mock_val + + +def test_bulk_replace_data_table_rows_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_replace_data_table_rows( + data_table.BulkReplaceDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkReplaceDataTableRowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkReplaceDataTableRowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.bulk_replace_data_table_rows( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [data_table.CreateDataTableRowRequest(parent="parent_value")] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.bulk_replace_data_table_rows( + data_table.BulkReplaceDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkUpdateDataTableRowsRequest, + dict, + ], +) +def test_bulk_update_data_table_rows(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkUpdateDataTableRowsResponse() + response = client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.BulkUpdateDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.BulkUpdateDataTableRowsResponse) + + +def test_bulk_update_data_table_rows_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.BulkUpdateDataTableRowsRequest( + parent="parent_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.bulk_update_data_table_rows(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.BulkUpdateDataTableRowsRequest( + parent="parent_value", + ) + + +def test_bulk_update_data_table_rows_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_update_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[ + client._transport.bulk_update_data_table_rows + ] = mock_rpc + request = {} + client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.bulk_update_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.bulk_update_data_table_rows + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.bulk_update_data_table_rows + ] = mock_rpc + + request = {} + await client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + await client.bulk_update_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_async( + transport: str = "grpc_asyncio", + request_type=data_table.BulkUpdateDataTableRowsRequest, +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkUpdateDataTableRowsResponse() + ) + response = await client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.BulkUpdateDataTableRowsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkUpdateDataTableRowsResponse) + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_async_from_dict(): + await test_bulk_update_data_table_rows_async(request_type=dict) + + +def test_bulk_update_data_table_rows_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = data_table.BulkUpdateDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkUpdateDataTableRowsResponse() + client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.BulkUpdateDataTableRowsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkUpdateDataTableRowsResponse() + ) + await client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_bulk_update_data_table_rows_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.BulkUpdateDataTableRowsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.bulk_update_data_table_rows( + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ] + assert arg == mock_val + + +def test_bulk_update_data_table_rows_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.bulk_update_data_table_rows( + data_table.BulkUpdateDataTableRowsRequest(), + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = data_table.BulkUpdateDataTableRowsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkUpdateDataTableRowsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.bulk_update_data_table_rows( + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].requests + mock_val = [ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ] + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.bulk_update_data_table_rows( + data_table.BulkUpdateDataTableRowsRequest(), + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableOperationErrorsRequest, + dict, + ], +) +def test_get_data_table_operation_errors(request_type, transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableOperationErrors( + name="name_value", + ) + response = client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableOperationErrorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableOperationErrors) + assert response.name == "name_value" + + +def test_get_data_table_operation_errors_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = data_table.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_data_table_operation_errors(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == data_table.GetDataTableOperationErrorsRequest( + name="name_value", + ) + + +def test_get_data_table_operation_errors_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_table_operation_errors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_table_operation_errors + ] = mock_rpc + request = {} + client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_table_operation_errors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_data_table_operation_errors + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_data_table_operation_errors + ] = mock_rpc + + request = {} + await client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_data_table_operation_errors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_async( + transport: str = "grpc_asyncio", + request_type=data_table.GetDataTableOperationErrorsRequest, +): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableOperationErrors( + name="name_value", + ) + ) + response = await client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = data_table.GetDataTableOperationErrorsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableOperationErrors) + assert response.name == "name_value" + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_async_from_dict(): + await test_get_data_table_operation_errors_async(request_type=dict) + + +def test_get_data_table_operation_errors_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableOperationErrorsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + call.return_value = data_table.DataTableOperationErrors() + client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = data_table.GetDataTableOperationErrorsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableOperationErrors() + ) + await client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_data_table_operation_errors_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableOperationErrors() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_data_table_operation_errors( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_data_table_operation_errors_flattened_error(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_table_operation_errors( + data_table.GetDataTableOperationErrorsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = data_table.DataTableOperationErrors() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableOperationErrors() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_data_table_operation_errors( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_flattened_error_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_data_table_operation_errors( + data_table.GetDataTableOperationErrorsRequest(), + name="name_value", + ) + + +def test_create_data_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.create_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_data_table] = ( + mock_rpc + ) + + request = {} + client.create_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_table_rest_required_fields( + request_type=gcc_data_table.CreateDataTableRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["data_table_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "dataTableId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "dataTableId" in jsonified_request + assert jsonified_request["dataTableId"] == request_init["data_table_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["dataTableId"] = "data_table_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set(("data_table_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "dataTableId" in jsonified_request + assert jsonified_request["dataTableId"] == "data_table_id_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_data_table.DataTable() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_table(request) + + expected_params = [ + ( + "dataTableId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_data_table_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_table._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(("dataTableId",)) + & set( + ( + "parent", + "dataTable", + "dataTableId", + ) + ) + ) + + +def test_create_data_table_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcc_data_table.DataTable() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*}/dataTables" + % client.transport._host, + args[1], + ) + + +def test_create_data_table_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_table( + gcc_data_table.CreateDataTableRequest(), + parent="parent_value", + data_table=gcc_data_table.DataTable(name="name_value"), + data_table_id="data_table_id_value", + ) + + +def test_list_data_tables_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.list_data_tables in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_data_tables] = ( + mock_rpc + ) + + request = {} + client.list_data_tables(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_tables(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_tables_rest_required_fields( + request_type=data_table.ListDataTablesRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_tables._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_tables._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.ListDataTablesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.ListDataTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_data_tables(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_tables_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_tables._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_data_tables_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.ListDataTablesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.ListDataTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_data_tables(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*}/dataTables" + % client.transport._host, + args[1], + ) + + +def test_list_data_tables_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_tables( + data_table.ListDataTablesRequest(), + parent="parent_value", + ) + + +def test_list_data_tables_rest_pager(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + data_table.DataTable(), + ], + next_page_token="abc", + ), + data_table.ListDataTablesResponse( + data_tables=[], + next_page_token="def", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + ], + next_page_token="ghi", + ), + data_table.ListDataTablesResponse( + data_tables=[ + data_table.DataTable(), + data_table.DataTable(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(data_table.ListDataTablesResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3" + } + + pager = client.list_data_tables(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_table.DataTable) for i in results) + + pages = list(client.list_data_tables(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_data_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure 
method has been cached + assert client._transport.get_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_table] = mock_rpc + + request = {} + client.get_data_table(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_table_rest_required_fields( + request_type=data_table.GetDataTableRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the 
returned response. + return_value = data_table.DataTable() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_table_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_data_table_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.DataTable() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/dataTables/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_table_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_table( + data_table.GetDataTableRequest(), + name="name_value", + ) + + +def test_update_data_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.update_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_data_table] = ( + mock_rpc + ) + + request = {} + client.update_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_table_rest_required_fields( + request_type=gcc_data_table.UpdateDataTableRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = gcc_data_table.DataTable() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_table_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dataTable",))) + + +def test_update_data_table_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = gcc_data_table.DataTable() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_table": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_table.name=projects/*/locations/*/instances/*/dataTables/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_table_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_table( + gcc_data_table.UpdateDataTableRequest(), + data_table=gcc_data_table.DataTable(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_data_table_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.delete_data_table in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_data_table] = ( + mock_rpc + ) + + request = {} + client.delete_data_table(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_data_table(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_table_rest_required_fields( + request_type=data_table.DeleteDataTableRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_table._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_table._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("force",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_table(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_table_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_table._get_unset_required_fields({}) + assert set(unset_fields) == (set(("force",)) & set(("name",))) + + +def test_delete_data_table_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + force=True, + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_table(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/dataTables/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_table_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_table( + data_table.DeleteDataTableRequest(), + name="name_value", + force=True, + ) + + +def test_create_data_table_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_data_table_row] = ( + mock_rpc + ) + + request = {} + client.create_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.create_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_data_table_row_rest_required_fields( + request_type=data_table.CreateDataTableRowRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_data_table_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_data_table_row_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_data_table_row._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "dataTableRow", + ) + ) + ) + + +def test_create_data_table_row_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.DataTableRow() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_data_table_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows" + % client.transport._host, + args[1], + ) + + +def test_create_data_table_row_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_data_table_row( + data_table.CreateDataTableRowRequest(), + parent="parent_value", + data_table_row=data_table.DataTableRow(name="name_value"), + ) + + +def test_update_data_table_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_data_table_row] = ( + mock_rpc + ) + + request = {} + client.update_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.update_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_data_table_row_rest_required_fields( + request_type=data_table.UpdateDataTableRowRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_data_table_row._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_data_table_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_data_table_row_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_data_table_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("dataTableRow",))) + + +def test_update_data_table_row_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.DataTableRow() + + # get arguments that satisfy an http rule for this method + sample_request = { + "data_table_row": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + } + + # get truthy value for each flattened field + mock_args = dict( + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_data_table_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{data_table_row.name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}" + % client.transport._host, + args[1], + ) + + +def test_update_data_table_row_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_data_table_row( + data_table.UpdateDataTableRowRequest(), + data_table_row=data_table.DataTableRow(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_list_data_table_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_data_table_rows in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_data_table_rows] = ( + mock_rpc + ) + + request = {} + client.list_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_data_table_rows_rest_required_fields( + request_type=data_table.ListDataTableRowsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_data_table_rows._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.ListDataTableRowsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.ListDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_data_table_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_data_table_rows_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_data_table_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_data_table_rows_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.ListDataTableRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.ListDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_data_table_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows" + % client.transport._host, + args[1], + ) + + +def test_list_data_table_rows_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_data_table_rows( + data_table.ListDataTableRowsRequest(), + parent="parent_value", + ) + + +def test_list_data_table_rows_rest_pager(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + next_page_token="abc", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[], + next_page_token="def", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + ], + next_page_token="ghi", + ), + data_table.ListDataTableRowsResponse( + data_table_rows=[ + data_table.DataTableRow(), + data_table.DataTableRow(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + data_table.ListDataTableRowsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + pager = client.list_data_table_rows(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, data_table.DataTableRow) for i in results) + + pages = list(client.list_data_table_rows(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_data_table_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = 
DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_table_row in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_data_table_row] = ( + mock_rpc + ) + + request = {} + client.get_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_table_row_rest_required_fields( + request_type=data_table.GetDataTableRowRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in 
jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_table_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_table_row_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_table_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_data_table_row_rest_flattened(): + client = 
DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_table_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_table_row_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_data_table_row( + data_table.GetDataTableRowRequest(), + name="name_value", + ) + + +def test_delete_data_table_row_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_data_table_row + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_data_table_row] = ( + mock_rpc + ) + + request = {} + client.delete_data_table_row(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.delete_data_table_row(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_data_table_row_rest_required_fields( + request_type=data_table.DeleteDataTableRowRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_data_table_row._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = None + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_data_table_row(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_data_table_row_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_data_table_row._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_delete_data_table_row_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = "" + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_data_table_row(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/dataTables/*/dataTableRows/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_data_table_row_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_data_table_row( + data_table.DeleteDataTableRowRequest(), + name="name_value", + ) + + +def test_bulk_create_data_table_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_create_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_create_data_table_rows + ] = mock_rpc + + request = {} + client.bulk_create_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_create_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_bulk_create_data_table_rows_rest_required_fields( + request_type=data_table.BulkCreateDataTableRowsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_create_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_create_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.BulkCreateDataTableRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkCreateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.bulk_create_data_table_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_bulk_create_data_table_rows_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.bulk_create_data_table_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +def test_bulk_create_data_table_rows_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkCreateDataTableRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.BulkCreateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.bulk_create_data_table_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkCreate" + % client.transport._host, + args[1], + ) + + +def test_bulk_create_data_table_rows_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.bulk_create_data_table_rows( + data_table.BulkCreateDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +def test_bulk_get_data_table_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_get_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_get_data_table_rows + ] = mock_rpc + + request = {} + client.bulk_get_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_get_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_bulk_get_data_table_rows_rest_required_fields( + request_type=data_table.BulkGetDataTableRowsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_get_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_get_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.BulkGetDataTableRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkGetDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.bulk_get_data_table_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_bulk_get_data_table_rows_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.bulk_get_data_table_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +def test_bulk_get_data_table_rows_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkGetDataTableRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.BulkGetDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.bulk_get_data_table_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkGet" + % client.transport._host, + args[1], + ) + + +def test_bulk_get_data_table_rows_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.bulk_get_data_table_rows( + data_table.BulkGetDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.GetDataTableRowRequest(name="name_value")], + ) + + +def test_bulk_replace_data_table_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_replace_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_replace_data_table_rows + ] = mock_rpc + + request = {} + client.bulk_replace_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_replace_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_bulk_replace_data_table_rows_rest_required_fields( + request_type=data_table.BulkReplaceDataTableRowsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_replace_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_replace_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.BulkReplaceDataTableRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkReplaceDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.bulk_replace_data_table_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_bulk_replace_data_table_rows_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.bulk_replace_data_table_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +def test_bulk_replace_data_table_rows_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkReplaceDataTableRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.BulkReplaceDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.bulk_replace_data_table_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkReplace" + % client.transport._host, + args[1], + ) + + +def test_bulk_replace_data_table_rows_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.bulk_replace_data_table_rows( + data_table.BulkReplaceDataTableRowsRequest(), + parent="parent_value", + requests=[data_table.CreateDataTableRowRequest(parent="parent_value")], + ) + + +def test_bulk_update_data_table_rows_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.bulk_update_data_table_rows + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.bulk_update_data_table_rows + ] = mock_rpc + + request = {} + client.bulk_update_data_table_rows(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.bulk_update_data_table_rows(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_bulk_update_data_table_rows_rest_required_fields( + request_type=data_table.BulkUpdateDataTableRowsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_update_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).bulk_update_data_table_rows._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.BulkUpdateDataTableRowsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkUpdateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.bulk_update_data_table_rows(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_bulk_update_data_table_rows_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.bulk_update_data_table_rows._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "parent", + "requests", + ) + ) + ) + + +def test_bulk_update_data_table_rows_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkUpdateDataTableRowsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.BulkUpdateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.bulk_update_data_table_rows(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/instances/*/dataTables/*}/dataTableRows:bulkUpdate" + % client.transport._host, + args[1], + ) + + +def test_bulk_update_data_table_rows_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.bulk_update_data_table_rows( + data_table.BulkUpdateDataTableRowsRequest(), + parent="parent_value", + requests=[ + data_table.UpdateDataTableRowRequest( + data_table_row=data_table.DataTableRow(name="name_value") + ) + ], + ) + + +def test_get_data_table_operation_errors_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_data_table_operation_errors + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_data_table_operation_errors + ] = mock_rpc + + request = {} + client.get_data_table_operation_errors(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_data_table_operation_errors(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_data_table_operation_errors_rest_required_fields( + request_type=data_table.GetDataTableOperationErrorsRequest, +): + transport_class = transports.DataTableServiceRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table_operation_errors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_data_table_operation_errors._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableOperationErrors() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableOperationErrors.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_data_table_operation_errors(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_data_table_operation_errors_rest_unset_required_fields(): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_data_table_operation_errors._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_data_table_operation_errors_rest_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.DataTableOperationErrors() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTableOperationErrors/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = data_table.DataTableOperationErrors.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_data_table_operation_errors(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/instances/*/dataTableOperationErrors/*}" + % client.transport._host, + args[1], + ) + + +def test_get_data_table_operation_errors_rest_flattened_error(transport: str = "rest"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_data_table_operation_errors( + data_table.GetDataTableOperationErrorsRequest(), + name="name_value", + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTableServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTableServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = DataTableServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = DataTableServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = DataTableServiceClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.DataTableServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.DataTableServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceGrpcAsyncIOTransport, + transports.DataTableServiceRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = DataTableServiceClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_table_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + call.return_value = gcc_data_table.DataTable() + client.create_data_table(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.CreateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_tables_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + call.return_value = data_table.ListDataTablesResponse() + client.list_data_tables(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTablesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + call.return_value = data_table.DataTable() + client.get_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_table_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + call.return_value = gcc_data_table.DataTable() + client.update_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.UpdateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_table_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + call.return_value = None + client.delete_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_table_row_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.create_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.CreateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_update_data_table_row_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.update_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.UpdateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_table_rows_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.ListDataTableRowsResponse() + client.list_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_row_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + call.return_value = data_table.DataTableRow() + client.get_data_table_row(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_table_row_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + call.return_value = None + client.delete_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_create_data_table_rows_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkCreateDataTableRowsResponse() + client.bulk_create_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkCreateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_bulk_get_data_table_rows_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkGetDataTableRowsResponse() + client.bulk_get_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkGetDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_replace_data_table_rows_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkReplaceDataTableRowsResponse() + client.bulk_replace_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkReplaceDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_update_data_table_rows_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + call.return_value = data_table.BulkUpdateDataTableRowsResponse() + client.bulk_update_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkUpdateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_operation_errors_empty_call_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + call.return_value = data_table.DataTableOperationErrors() + client.get_data_table_operation_errors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableOperationErrorsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = DataTableServiceAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_data_table_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + ) + await client.create_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.CreateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_tables_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTablesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_data_tables(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTablesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_table_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=data_table.DataTableUpdateSource.USER, + ) + ) + await client.get_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_table_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + ) + await client.update_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.UpdateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_table_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_data_table_row_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + await client.create_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.CreateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_data_table_row_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + await client.update_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.UpdateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_data_table_rows_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.ListDataTableRowsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_table_row_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + ) + await client.get_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_data_table_row_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_bulk_create_data_table_rows_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkCreateDataTableRowsResponse() + ) + await client.bulk_create_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkCreateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_bulk_get_data_table_rows_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkGetDataTableRowsResponse() + ) + await client.bulk_get_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkGetDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_bulk_replace_data_table_rows_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkReplaceDataTableRowsResponse() + ) + await client.bulk_replace_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkReplaceDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_bulk_update_data_table_rows_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.BulkUpdateDataTableRowsResponse() + ) + await client.bulk_update_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkUpdateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_data_table_operation_errors_empty_call_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + data_table.DataTableOperationErrors( + name="name_value", + ) + ) + await client.get_data_table_operation_errors(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableOperationErrorsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = DataTableServiceClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_data_table_rest_bad_request( + request_type=gcc_data_table.CreateDataTableRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_table(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcc_data_table.CreateDataTableRequest, + dict, + ], +) +def test_create_data_table_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request_init["data_table"] = { + "name": "name_value", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "column_info": [ + { + "mapped_column_path": "mapped_column_path_value", + 
"column_type": 1, + "column_index": 1285, + "original_column": "original_column_value", + "key_column": True, + "repeated_values": True, + } + ], + "data_table_uuid": "data_table_uuid_value", + "rules": ["rules_value1", "rules_value2"], + "rule_associations_count": 2479, + "row_time_to_live": "row_time_to_live_value", + "approximate_row_count": 2281, + "scope_info": { + "data_access_scopes": [ + "data_access_scopes_value1", + "data_access_scopes_value2", + ] + }, + "update_source": 1, + "row_time_to_live_update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcc_data_table.CreateDataTableRequest.meta.fields["data_table"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_table"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_table"][field])): + del request_init["data_table"][field][i][subfield] + else: + del 
request_init["data_table"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_data_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_table_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_create_data_table" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_create_data_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_create_data_table" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcc_data_table.CreateDataTableRequest.pb( + gcc_data_table.CreateDataTableRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcc_data_table.DataTable.to_json(gcc_data_table.DataTable()) + 
req.return_value.content = return_value + + request = gcc_data_table.CreateDataTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_data_table.DataTable() + post_with_metadata.return_value = gcc_data_table.DataTable(), metadata + + client.create_data_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_data_tables_rest_bad_request( + request_type=data_table.ListDataTablesRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_tables(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.ListDataTablesRequest, + dict, + ], +) +def test_list_data_tables_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.ListDataTablesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.ListDataTablesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_tables(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTablesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_tables_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_list_data_tables" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_list_data_tables_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_list_data_tables" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.ListDataTablesRequest.pb( + data_table.ListDataTablesRequest() + ) + 
transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.ListDataTablesResponse.to_json( + data_table.ListDataTablesResponse() + ) + req.return_value.content = return_value + + request = data_table.ListDataTablesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.ListDataTablesResponse() + post_with_metadata.return_value = data_table.ListDataTablesResponse(), metadata + + client.list_data_tables( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_data_table_rest_bad_request(request_type=data_table.GetDataTableRequest): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_table(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableRequest, + dict, + ], +) +def test_get_data_table_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=data_table.DataTableUpdateSource.USER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_table(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == data_table.DataTableUpdateSource.USER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_table_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + 
mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_get_data_table" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_get_data_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_get_data_table" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.GetDataTableRequest.pb(data_table.GetDataTableRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.DataTable.to_json(data_table.DataTable()) + req.return_value.content = return_value + + request = data_table.GetDataTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.DataTable() + post_with_metadata.return_value = data_table.DataTable(), metadata + + client.get_data_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_data_table_rest_bad_request( + request_type=gcc_data_table.UpdateDataTableRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "data_table": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_data_table(request) + + +@pytest.mark.parametrize( + "request_type", + [ + gcc_data_table.UpdateDataTableRequest, + dict, + ], +) +def test_update_data_table_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "data_table": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + } + request_init["data_table"] = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4", + "display_name": "display_name_value", + "description": "description_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "column_info": [ + { + "mapped_column_path": "mapped_column_path_value", + "column_type": 1, + "column_index": 1285, + "original_column": "original_column_value", + "key_column": True, + "repeated_values": True, + } + ], + "data_table_uuid": "data_table_uuid_value", + "rules": ["rules_value1", "rules_value2"], + "rule_associations_count": 2479, + "row_time_to_live": "row_time_to_live_value", + "approximate_row_count": 2281, + "scope_info": { + "data_access_scopes": [ + "data_access_scopes_value1", + "data_access_scopes_value2", + ] + }, + "update_source": 1, + "row_time_to_live_update_time": {}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = gcc_data_table.UpdateDataTableRequest.meta.fields["data_table"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_table"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the 
dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_table"][field])): + del request_init["data_table"][field][i][subfield] + else: + del request_init["data_table"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = gcc_data_table.DataTable( + name="name_value", + display_name="display_name_value", + description="description_value", + data_table_uuid="data_table_uuid_value", + rules=["rules_value"], + rule_associations_count=2479, + row_time_to_live="row_time_to_live_value", + approximate_row_count=2281, + update_source=gcc_data_table.DataTableUpdateSource.USER, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = gcc_data_table.DataTable.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_table(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, gcc_data_table.DataTable) + assert response.name == "name_value" + assert response.display_name == "display_name_value" + assert response.description == "description_value" + assert response.data_table_uuid == "data_table_uuid_value" + assert response.rules == ["rules_value"] + assert response.rule_associations_count == 2479 + assert response.row_time_to_live == "row_time_to_live_value" + assert response.approximate_row_count == 2281 + assert response.update_source == gcc_data_table.DataTableUpdateSource.USER + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_table_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_update_data_table" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_update_data_table_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_update_data_table" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = gcc_data_table.UpdateDataTableRequest.pb( + gcc_data_table.UpdateDataTableRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = gcc_data_table.DataTable.to_json(gcc_data_table.DataTable()) + 
req.return_value.content = return_value + + request = gcc_data_table.UpdateDataTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = gcc_data_table.DataTable() + post_with_metadata.return_value = gcc_data_table.DataTable(), metadata + + client.update_data_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_data_table_rest_bad_request( + request_type=data_table.DeleteDataTableRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_data_table(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.DeleteDataTableRequest, + dict, + ], +) +def test_delete_data_table_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_data_table(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_table_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_delete_data_table" + ) as pre, + ): + pre.assert_not_called() + pb_message = data_table.DeleteDataTableRequest.pb( + data_table.DeleteDataTableRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = data_table.DeleteDataTableRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_data_table( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_create_data_table_row_rest_bad_request( + request_type=data_table.CreateDataTableRowRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_data_table_row(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.CreateDataTableRowRequest, + dict, + ], +) +def test_create_data_table_row_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request_init["data_table_row"] = { + "name": "name_value", + "values": ["values_value1", "values_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "row_time_to_live": "row_time_to_live_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_table.CreateDataTableRowRequest.meta.fields["data_table_row"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_table_row"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_table_row"][field])): + del request_init["data_table_row"][field][i][subfield] + else: + 
del request_init["data_table_row"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_data_table_row(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_data_table_row_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_create_data_table_row" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_create_data_table_row_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_create_data_table_row" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.CreateDataTableRowRequest.pb( + data_table.CreateDataTableRowRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.DataTableRow.to_json(data_table.DataTableRow()) + req.return_value.content = return_value + + request = data_table.CreateDataTableRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.DataTableRow() + post_with_metadata.return_value = data_table.DataTableRow(), metadata + + client.create_data_table_row( 
+ request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_data_table_row_rest_bad_request( + request_type=data_table.UpdateDataTableRowRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "data_table_row": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_data_table_row(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.UpdateDataTableRowRequest, + dict, + ], +) +def test_update_data_table_row_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "data_table_row": { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + } + request_init["data_table_row"] = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5", + "values": ["values_value1", "values_value2"], + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "row_time_to_live": 
"row_time_to_live_value", + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = data_table.UpdateDataTableRowRequest.meta.fields["data_table_row"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["data_table_row"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": 
subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["data_table_row"][field])): + del request_init["data_table_row"][field][i][subfield] + else: + del request_init["data_table_row"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_data_table_row(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_data_table_row_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_update_data_table_row" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_update_data_table_row_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_update_data_table_row" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.UpdateDataTableRowRequest.pb( + data_table.UpdateDataTableRowRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.DataTableRow.to_json(data_table.DataTableRow()) + req.return_value.content = return_value + + request = data_table.UpdateDataTableRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.DataTableRow() + post_with_metadata.return_value = data_table.DataTableRow(), metadata + + client.update_data_table_row( 
+ request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_data_table_rows_rest_bad_request( + request_type=data_table.ListDataTableRowsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_data_table_rows(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.ListDataTableRowsRequest, + dict, + ], +) +def test_list_data_table_rows_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.ListDataTableRowsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.ListDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_data_table_rows(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDataTableRowsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_data_table_rows_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_list_data_table_rows" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_list_data_table_rows_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_list_data_table_rows" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.ListDataTableRowsRequest.pb( + data_table.ListDataTableRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": 
pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.ListDataTableRowsResponse.to_json( + data_table.ListDataTableRowsResponse() + ) + req.return_value.content = return_value + + request = data_table.ListDataTableRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.ListDataTableRowsResponse() + post_with_metadata.return_value = ( + data_table.ListDataTableRowsResponse(), + metadata, + ) + + client.list_data_table_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_data_table_row_rest_bad_request( + request_type=data_table.GetDataTableRowRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_table_row(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableRowRequest, + dict, + ], +) +def test_get_data_table_row_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.DataTableRow( + name="name_value", + values=["values_value"], + row_time_to_live="row_time_to_live_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableRow.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_table_row(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.DataTableRow) + assert response.name == "name_value" + assert response.values == ["values_value"] + assert response.row_time_to_live == "row_time_to_live_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_table_row_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_get_data_table_row" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_get_data_table_row_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_get_data_table_row" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.GetDataTableRowRequest.pb( + data_table.GetDataTableRowRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.DataTableRow.to_json(data_table.DataTableRow()) + req.return_value.content = return_value + + request = data_table.GetDataTableRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.DataTableRow() + post_with_metadata.return_value = data_table.DataTableRow(), metadata + + client.get_data_table_row( + request, + metadata=[ 
+ ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_data_table_row_rest_bad_request( + request_type=data_table.DeleteDataTableRowRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_data_table_row(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.DeleteDataTableRowRequest, + dict, + ], +) +def test_delete_data_table_row_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4/dataTableRows/sample5" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "" + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_data_table_row(request) + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_data_table_row_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_delete_data_table_row" + ) as pre, + ): + pre.assert_not_called() + pb_message = data_table.DeleteDataTableRowRequest.pb( + data_table.DeleteDataTableRowRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + request = data_table.DeleteDataTableRowRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + + client.delete_data_table_row( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + + +def test_bulk_create_data_table_rows_rest_bad_request( + request_type=data_table.BulkCreateDataTableRowsRequest, +): + client = DataTableServiceClient( + 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.bulk_create_data_table_rows(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkCreateDataTableRowsRequest, + dict, + ], +) +def test_bulk_create_data_table_rows_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkCreateDataTableRowsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkCreateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.bulk_create_data_table_rows(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkCreateDataTableRowsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_create_data_table_rows_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_create_data_table_rows", + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_create_data_table_rows_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "pre_bulk_create_data_table_rows", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.BulkCreateDataTableRowsRequest.pb( + data_table.BulkCreateDataTableRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.BulkCreateDataTableRowsResponse.to_json( + data_table.BulkCreateDataTableRowsResponse() + ) + req.return_value.content = return_value + + request = data_table.BulkCreateDataTableRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.BulkCreateDataTableRowsResponse() + post_with_metadata.return_value = ( + data_table.BulkCreateDataTableRowsResponse(), + metadata, + ) + + client.bulk_create_data_table_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_bulk_get_data_table_rows_rest_bad_request( + request_type=data_table.BulkGetDataTableRowsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.bulk_get_data_table_rows(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkGetDataTableRowsRequest, + dict, + ], +) +def test_bulk_get_data_table_rows_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.BulkGetDataTableRowsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkGetDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.bulk_get_data_table_rows(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.BulkGetDataTableRowsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_get_data_table_rows_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "post_bulk_get_data_table_rows" + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_get_data_table_rows_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, "pre_bulk_get_data_table_rows" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.BulkGetDataTableRowsRequest.pb( + data_table.BulkGetDataTableRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.BulkGetDataTableRowsResponse.to_json( + data_table.BulkGetDataTableRowsResponse() + ) + req.return_value.content = return_value + + request = data_table.BulkGetDataTableRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.BulkGetDataTableRowsResponse() + post_with_metadata.return_value = ( + data_table.BulkGetDataTableRowsResponse(), + metadata, + ) + + client.bulk_get_data_table_rows( + request, + metadata=[ + 
("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_bulk_replace_data_table_rows_rest_bad_request( + request_type=data_table.BulkReplaceDataTableRowsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.bulk_replace_data_table_rows(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkReplaceDataTableRowsRequest, + dict, + ], +) +def test_bulk_replace_data_table_rows_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.BulkReplaceDataTableRowsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkReplaceDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.bulk_replace_data_table_rows(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.BulkReplaceDataTableRowsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_replace_data_table_rows_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_replace_data_table_rows", + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_replace_data_table_rows_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "pre_bulk_replace_data_table_rows", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.BulkReplaceDataTableRowsRequest.pb( + data_table.BulkReplaceDataTableRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + 
req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.BulkReplaceDataTableRowsResponse.to_json( + data_table.BulkReplaceDataTableRowsResponse() + ) + req.return_value.content = return_value + + request = data_table.BulkReplaceDataTableRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.BulkReplaceDataTableRowsResponse() + post_with_metadata.return_value = ( + data_table.BulkReplaceDataTableRowsResponse(), + metadata, + ) + + client.bulk_replace_data_table_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_bulk_update_data_table_rows_rest_bad_request( + request_type=data_table.BulkUpdateDataTableRowsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.bulk_update_data_table_rows(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.BulkUpdateDataTableRowsRequest, + dict, + ], +) +def test_bulk_update_data_table_rows_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/instances/sample3/dataTables/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = data_table.BulkUpdateDataTableRowsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.BulkUpdateDataTableRowsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.bulk_update_data_table_rows(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, data_table.BulkUpdateDataTableRowsResponse) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_bulk_update_data_table_rows_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_update_data_table_rows", + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_bulk_update_data_table_rows_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "pre_bulk_update_data_table_rows", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.BulkUpdateDataTableRowsRequest.pb( + data_table.BulkUpdateDataTableRowsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.BulkUpdateDataTableRowsResponse.to_json( + data_table.BulkUpdateDataTableRowsResponse() + ) + req.return_value.content = return_value + + request = data_table.BulkUpdateDataTableRowsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.BulkUpdateDataTableRowsResponse() + post_with_metadata.return_value = ( + data_table.BulkUpdateDataTableRowsResponse(), + metadata, + ) + + 
client.bulk_update_data_table_rows( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_data_table_operation_errors_rest_bad_request( + request_type=data_table.GetDataTableOperationErrorsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTableOperationErrors/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_data_table_operation_errors(request) + + +@pytest.mark.parametrize( + "request_type", + [ + data_table.GetDataTableOperationErrorsRequest, + dict, + ], +) +def test_get_data_table_operation_errors_rest_call_success(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/dataTableOperationErrors/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = data_table.DataTableOperationErrors( + name="name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = data_table.DataTableOperationErrors.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_data_table_operation_errors(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, data_table.DataTableOperationErrors) + assert response.name == "name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_data_table_operation_errors_rest_interceptors(null_interceptor): + transport = transports.DataTableServiceRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.DataTableServiceRestInterceptor(), + ) + client = DataTableServiceClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_get_data_table_operation_errors", + ) as post, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "post_get_data_table_operation_errors_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.DataTableServiceRestInterceptor, + "pre_get_data_table_operation_errors", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = data_table.GetDataTableOperationErrorsRequest.pb( + data_table.GetDataTableOperationErrorsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", 
+ "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = data_table.DataTableOperationErrors.to_json( + data_table.DataTableOperationErrors() + ) + req.return_value.content = return_value + + request = data_table.GetDataTableOperationErrorsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = data_table.DataTableOperationErrors() + post_with_metadata.return_value = ( + data_table.DataTableOperationErrors(), + metadata, + ) + + client.get_data_table_operation_errors( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_cancel_operation_rest_bad_request( + request_type=operations_pb2.CancelOperationRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.cancel_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.CancelOperationRequest, + dict, + ], +) +def test_cancel_operation_rest(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.cancel_operation(request) + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_operation_rest_bad_request( + request_type=operations_pb2.DeleteOperationRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.DeleteOperationRequest, + dict, + ], +) +def test_delete_operation_rest(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = None + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = "{}" + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_operation(request) + + # Establish that the response is the type that we expect. + assert response is None + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + }, + request, + ) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = { + "name": "projects/sample1/locations/sample2/instances/sample3/operations/sample4" + } + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_list_operations_rest_bad_request( + request_type=operations_pb2.ListOperationsRequest, +): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/instances/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_operations(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.ListOperationsRequest, + dict, + ], +) +def test_list_operations_rest(request_type): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/instances/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.ListOperationsResponse() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_operations(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_initialize_client_w_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_data_table_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table), "__call__" + ) as call: + client.create_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.CreateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_tables_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_data_tables), "__call__") as call: + client.list_data_tables(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTablesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_data_table), "__call__") as call: + client.get_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_update_data_table_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table), "__call__" + ) as call: + client.update_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = gcc_data_table.UpdateDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_table_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table), "__call__" + ) as call: + client.delete_data_table(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_data_table_row_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_data_table_row), "__call__" + ) as call: + client.create_data_table_row(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.CreateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_data_table_row_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_data_table_row), "__call__" + ) as call: + client.update_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.UpdateDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_data_table_rows_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_data_table_rows), "__call__" + ) as call: + client.list_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.ListDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_row_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_data_table_row), "__call__" + ) as call: + client.get_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_data_table_row_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_data_table_row), "__call__" + ) as call: + client.delete_data_table_row(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.DeleteDataTableRowRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_create_data_table_rows_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_create_data_table_rows), "__call__" + ) as call: + client.bulk_create_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkCreateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_bulk_get_data_table_rows_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_get_data_table_rows), "__call__" + ) as call: + client.bulk_get_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkGetDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_replace_data_table_rows_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_replace_data_table_rows), "__call__" + ) as call: + client.bulk_replace_data_table_rows(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkReplaceDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_bulk_update_data_table_rows_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.bulk_update_data_table_rows), "__call__" + ) as call: + client.bulk_update_data_table_rows(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.BulkUpdateDataTableRowsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_data_table_operation_errors_empty_call_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_data_table_operation_errors), "__call__" + ) as call: + client.get_data_table_operation_errors(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = data_table.GetDataTableOperationErrorsRequest() + + assert args[0] == request_msg + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.DataTableServiceGrpcTransport, + ) + + +def test_data_table_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.DataTableServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_data_table_service_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.chronicle_v1.services.data_table_service.transports.DataTableServiceTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.DataTableServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_data_table", + "list_data_tables", + "get_data_table", + "update_data_table", + "delete_data_table", + "create_data_table_row", + "update_data_table_row", + "list_data_table_rows", + "get_data_table_row", + "delete_data_table_row", + "bulk_create_data_table_rows", + "bulk_get_data_table_rows", + "bulk_replace_data_table_rows", + "bulk_update_data_table_rows", + "get_data_table_operation_errors", + "get_operation", + "cancel_operation", + "delete_operation", + "list_operations", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_data_table_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.chronicle_v1.services.data_table_service.transports.DataTableServiceTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTableServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +def test_data_table_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.chronicle_v1.services.data_table_service.transports.DataTableServiceTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.DataTableServiceTransport() + adc.assert_called_once() + + +def test_data_table_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + DataTableServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceGrpcAsyncIOTransport, + ], +) +def test_data_table_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceGrpcAsyncIOTransport, + transports.DataTableServiceRestTransport, + ], +) +def test_data_table_service_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.DataTableServiceGrpcTransport, grpc_helpers), + (transports.DataTableServiceGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_data_table_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "chronicle.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + "https://www.googleapis.com/auth/chronicle", + "https://www.googleapis.com/auth/chronicle.readonly", + "https://www.googleapis.com/auth/cloud-platform", + ), + scopes=["1", "2"], + default_host="chronicle.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceGrpcAsyncIOTransport, + ], +) +def test_data_table_service_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_data_table_service_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.DataTableServiceRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_table_service_host_no_port(transport_name): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="chronicle.googleapis.com" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "chronicle.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://chronicle.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_data_table_service_host_with_port(transport_name): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="chronicle.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "chronicle.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://chronicle.googleapis.com:8000" + ) + + 
@pytest.mark.parametrize(
    "transport_name",
    [
        "rest",
    ],
)
def test_data_table_service_client_transport_session_collision(transport_name):
    """Each REST client must own a distinct HTTP session for every wrapped RPC.

    Two clients built from different credentials should never share a
    ``requests`` session, otherwise credentials could leak across clients.
    """
    creds1 = ga_credentials.AnonymousCredentials()
    creds2 = ga_credentials.AnonymousCredentials()
    client1 = DataTableServiceClient(
        credentials=creds1,
        transport=transport_name,
    )
    client2 = DataTableServiceClient(
        credentials=creds2,
        transport=transport_name,
    )
    # One check per RPC exposed by the transport; looping replaces the
    # fifteen copy-pasted session-comparison stanzas of the original.
    for method_name in (
        "create_data_table",
        "list_data_tables",
        "get_data_table",
        "update_data_table",
        "delete_data_table",
        "create_data_table_row",
        "update_data_table_row",
        "list_data_table_rows",
        "get_data_table_row",
        "delete_data_table_row",
        "bulk_create_data_table_rows",
        "bulk_get_data_table_rows",
        "bulk_replace_data_table_rows",
        "bulk_update_data_table_rows",
        "get_data_table_operation_errors",
    ):
        session1 = getattr(client1.transport, method_name)._session
        session2 = getattr(client2.transport, method_name)._session
        assert session1 != session2


def test_data_table_service_grpc_transport_channel():
    """A caller-supplied channel must be used verbatim by the gRPC transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DataTableServiceGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: compare against None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None


def test_data_table_service_grpc_asyncio_transport_channel():
    """A caller-supplied channel must be used verbatim by the asyncio transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())

    # Check that channel is used if provided.
    transport = transports.DataTableServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Fixed: compare against None with `is`, not `==` (PEP 8 / flake8 E711).
    assert transport._ssl_channel_credentials is None


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.filterwarnings("ignore::FutureWarning")
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.DataTableServiceGrpcTransport,
        transports.DataTableServiceGrpcAsyncIOTransport,
    ],
)
def test_data_table_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated mTLS args: the cert-source callback must feed grpc SSL creds.

    Passing ``api_mtls_endpoint`` + ``client_cert_source`` (deprecated) should
    build channel credentials from the callback's cert/key pair and open the
    channel against the mTLS endpoint.
    """
    # Patch both the SSL-credential factory and channel creation in one
    # combined context manager instead of nesting two `with` statements.
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as ssl_cred_factory, mock.patch.object(
        transport_class, "create_channel"
    ) as channel_factory:
        fake_ssl_cred = mock.Mock()
        ssl_cred_factory.return_value = fake_ssl_cred

        fake_channel = mock.Mock()
        channel_factory.return_value = fake_channel

        anon_cred = ga_credentials.AnonymousCredentials()
        # The deprecated arguments must still emit a DeprecationWarning,
        # and ADC is consulted exactly once for default credentials.
        with pytest.warns(DeprecationWarning), mock.patch.object(
            google.auth, "default"
        ) as adc:
            adc.return_value = (anon_cred, None)
            transport = transport_class(
                host="squid.clam.whelk",
                api_mtls_endpoint="mtls.squid.clam.whelk",
                client_cert_source=client_cert_source_callback,
            )
            adc.assert_called_once()

        ssl_cred_factory.assert_called_once_with(
            certificate_chain=b"cert bytes", private_key=b"key bytes"
        )
        channel_factory.assert_called_once_with(
            "mtls.squid.clam.whelk:443",
            credentials=anon_cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=fake_ssl_cred,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
        assert transport.grpc_channel == fake_channel
        assert transport._ssl_channel_credentials == fake_ssl_cred


# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize( + "transport_class", + [ + transports.DataTableServiceGrpcTransport, + transports.DataTableServiceGrpcAsyncIOTransport, + ], +) +def test_data_table_service_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_data_access_scope_path(): + project = "squid" + location = "clam" + instance = "whelk" + data_access_scope = "octopus" + expected = "projects/{project}/locations/{location}/instances/{instance}/dataAccessScopes/{data_access_scope}".format( + project=project, + location=location, + instance=instance, + data_access_scope=data_access_scope, + ) + actual = DataTableServiceClient.data_access_scope_path( + project, location, instance, data_access_scope + ) + assert expected == actual + + +def test_parse_data_access_scope_path(): + expected = { + "project": "oyster", + "location": "nudibranch", + "instance": "cuttlefish", + "data_access_scope": "mussel", + } + path = DataTableServiceClient.data_access_scope_path(**expected) + + # Check that the path construction is 
reversible. + actual = DataTableServiceClient.parse_data_access_scope_path(path) + assert expected == actual + + +def test_data_table_path(): + project = "winkle" + location = "nautilus" + instance = "scallop" + data_table = "abalone" + expected = "projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}".format( + project=project, + location=location, + instance=instance, + data_table=data_table, + ) + actual = DataTableServiceClient.data_table_path( + project, location, instance, data_table + ) + assert expected == actual + + +def test_parse_data_table_path(): + expected = { + "project": "squid", + "location": "clam", + "instance": "whelk", + "data_table": "octopus", + } + path = DataTableServiceClient.data_table_path(**expected) + + # Check that the path construction is reversible. + actual = DataTableServiceClient.parse_data_table_path(path) + assert expected == actual + + +def test_data_table_operation_errors_path(): + project = "oyster" + location = "nudibranch" + instance = "cuttlefish" + data_table_operation_errors = "mussel" + expected = "projects/{project}/locations/{location}/instances/{instance}/dataTableOperationErrors/{data_table_operation_errors}".format( + project=project, + location=location, + instance=instance, + data_table_operation_errors=data_table_operation_errors, + ) + actual = DataTableServiceClient.data_table_operation_errors_path( + project, location, instance, data_table_operation_errors + ) + assert expected == actual + + +def test_parse_data_table_operation_errors_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "instance": "scallop", + "data_table_operation_errors": "abalone", + } + path = DataTableServiceClient.data_table_operation_errors_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTableServiceClient.parse_data_table_operation_errors_path(path) + assert expected == actual + + +def test_data_table_row_path(): + project = "squid" + location = "clam" + instance = "whelk" + data_table = "octopus" + data_table_row = "oyster" + expected = "projects/{project}/locations/{location}/instances/{instance}/dataTables/{data_table}/dataTableRows/{data_table_row}".format( + project=project, + location=location, + instance=instance, + data_table=data_table, + data_table_row=data_table_row, + ) + actual = DataTableServiceClient.data_table_row_path( + project, location, instance, data_table, data_table_row + ) + assert expected == actual + + +def test_parse_data_table_row_path(): + expected = { + "project": "nudibranch", + "location": "cuttlefish", + "instance": "mussel", + "data_table": "winkle", + "data_table_row": "nautilus", + } + path = DataTableServiceClient.data_table_row_path(**expected) + + # Check that the path construction is reversible. + actual = DataTableServiceClient.parse_data_table_row_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "scallop" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = DataTableServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "abalone", + } + path = DataTableServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTableServiceClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "squid" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = DataTableServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "clam", + } + path = DataTableServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = DataTableServiceClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "whelk" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = DataTableServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "octopus", + } + path = DataTableServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = DataTableServiceClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "oyster" + expected = "projects/{project}".format( + project=project, + ) + actual = DataTableServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "nudibranch", + } + path = DataTableServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. 
+ actual = DataTableServiceClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "cuttlefish" + location = "mussel" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = DataTableServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "winkle", + "location": "nautilus", + } + path = DataTableServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. + actual = DataTableServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.DataTableServiceTransport, "_prep_wrapped_messages" + ) as prep: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.DataTableServiceTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = DataTableServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_delete_operation(transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + response = client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_operation_async(transport: str = "grpc_asyncio"): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.DeleteOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_operation_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = None + + client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_operation_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.DeleteOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_delete_operation_from_dict(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + response = client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_delete_operation_from_dict_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_delete_operation_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + client.delete_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.DeleteOperationRequest() + + +@pytest.mark.asyncio +async def test_delete_operation_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.delete_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.DeleteOperationRequest() + + +def test_cancel_operation(transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_cancel_operation_async(transport: str = "grpc_asyncio"): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.CancelOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert response is None + + +def test_cancel_operation_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = None + + client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_cancel_operation_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.CancelOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_cancel_operation_from_dict(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + + response = client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_cancel_operation_from_dict_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.cancel_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_cancel_operation_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = None + + client.cancel_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.CancelOperationRequest() + + +@pytest.mark.asyncio +async def test_cancel_operation_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.cancel_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.cancel_operation() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.CancelOperationRequest() + + +def test_get_operation(transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + client.get_operation() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + +@pytest.mark.asyncio +async def test_get_operation_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + +def test_list_operations(transport: str = "grpc"): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + response = client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.ListOperationsResponse) + + +@pytest.mark.asyncio +async def test_list_operations_async(transport: str = "grpc_asyncio"): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.ListOperationsRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.ListOperationsResponse) + + +def test_list_operations_field_headers(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_operations_field_headers_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.ListOperationsRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_list_operations_from_dict(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + response = client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_list_operations_from_dict_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + response = await client.list_operations( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_list_operations_flattened(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.ListOperationsResponse() + + client.list_operations() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.ListOperationsRequest() + + +@pytest.mark.asyncio +async def test_list_operations_flattened_async(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.list_operations), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.ListOperationsResponse() + ) + await client.list_operations() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.ListOperationsRequest() + + +def test_transport_close_grpc(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = DataTableServiceAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = DataTableServiceClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (DataTableServiceClient, transports.DataTableServiceGrpcTransport), + (DataTableServiceAsyncClient, transports.DataTableServiceGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-config/google/cloud/config/__init__.py b/packages/google-cloud-config/google/cloud/config/__init__.py index 287b89353973..0ceb7664dde2 100644 --- a/packages/google-cloud-config/google/cloud/config/__init__.py +++ b/packages/google-cloud-config/google/cloud/config/__init__.py @@ -23,19 +23,32 @@ from google.cloud.config_v1.types.config import ( ApplyResults, AutoMigrationConfig, + CreateDeploymentGroupRequest, CreateDeploymentRequest, CreatePreviewRequest, + DeleteDeploymentGroupRequest, DeleteDeploymentRequest, DeletePreviewRequest, DeleteStatefileRequest, Deployment, + DeploymentGroup, + DeploymentGroupRevision, DeploymentOperationMetadata, + DeploymentOperationSummary, + DeploymentSource, + DeploymentSpec, + DeploymentUnit, + 
DeploymentUnitProgress, + DeprovisionDeploymentGroupRequest, ExportDeploymentStatefileRequest, ExportLockInfoRequest, ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + ExternalValueSource, GetAutoMigrationConfigRequest, + GetDeploymentGroupRequest, + GetDeploymentGroupRevisionRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -45,6 +58,10 @@ GetTerraformVersionRequest, GitSource, ImportStatefileRequest, + ListDeploymentGroupRevisionsRequest, + ListDeploymentGroupRevisionsResponse, + ListDeploymentGroupsRequest, + ListDeploymentGroupsResponse, ListDeploymentsRequest, ListDeploymentsResponse, ListPreviewsRequest, @@ -69,6 +86,8 @@ PropertyChange, PropertyDrift, ProviderConfig, + ProvisionDeploymentGroupOperationMetadata, + ProvisionDeploymentGroupRequest, QuotaValidation, Resource, ResourceCAIInfo, @@ -86,6 +105,7 @@ TerraformVersion, UnlockDeploymentRequest, UpdateAutoMigrationConfigRequest, + UpdateDeploymentGroupRequest, UpdateDeploymentRequest, ) @@ -94,19 +114,32 @@ "ConfigAsyncClient", "ApplyResults", "AutoMigrationConfig", + "CreateDeploymentGroupRequest", "CreateDeploymentRequest", "CreatePreviewRequest", + "DeleteDeploymentGroupRequest", "DeleteDeploymentRequest", "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", + "DeploymentGroup", + "DeploymentGroupRevision", "DeploymentOperationMetadata", + "DeploymentOperationSummary", + "DeploymentSource", + "DeploymentSpec", + "DeploymentUnit", + "DeploymentUnitProgress", + "DeprovisionDeploymentGroupRequest", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "ExternalValueSource", "GetAutoMigrationConfigRequest", + "GetDeploymentGroupRequest", + "GetDeploymentGroupRevisionRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -116,6 +149,10 @@ "GetTerraformVersionRequest", "GitSource", 
"ImportStatefileRequest", + "ListDeploymentGroupRevisionsRequest", + "ListDeploymentGroupRevisionsResponse", + "ListDeploymentGroupsRequest", + "ListDeploymentGroupsResponse", "ListDeploymentsRequest", "ListDeploymentsResponse", "ListPreviewsRequest", @@ -140,6 +177,8 @@ "PropertyChange", "PropertyDrift", "ProviderConfig", + "ProvisionDeploymentGroupOperationMetadata", + "ProvisionDeploymentGroupRequest", "Resource", "ResourceCAIInfo", "ResourceChange", @@ -156,6 +195,7 @@ "TerraformVersion", "UnlockDeploymentRequest", "UpdateAutoMigrationConfigRequest", + "UpdateDeploymentGroupRequest", "UpdateDeploymentRequest", "QuotaValidation", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/__init__.py index ba61f7e34884..58883a570f9b 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/__init__.py @@ -33,19 +33,32 @@ from .types.config import ( ApplyResults, AutoMigrationConfig, + CreateDeploymentGroupRequest, CreateDeploymentRequest, CreatePreviewRequest, + DeleteDeploymentGroupRequest, DeleteDeploymentRequest, DeletePreviewRequest, DeleteStatefileRequest, Deployment, + DeploymentGroup, + DeploymentGroupRevision, DeploymentOperationMetadata, + DeploymentOperationSummary, + DeploymentSource, + DeploymentSpec, + DeploymentUnit, + DeploymentUnitProgress, + DeprovisionDeploymentGroupRequest, ExportDeploymentStatefileRequest, ExportLockInfoRequest, ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + ExternalValueSource, GetAutoMigrationConfigRequest, + GetDeploymentGroupRequest, + GetDeploymentGroupRevisionRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -55,6 +68,10 @@ GetTerraformVersionRequest, GitSource, ImportStatefileRequest, + ListDeploymentGroupRevisionsRequest, + ListDeploymentGroupRevisionsResponse, + ListDeploymentGroupsRequest, + 
ListDeploymentGroupsResponse, ListDeploymentsRequest, ListDeploymentsResponse, ListPreviewsRequest, @@ -79,6 +96,8 @@ PropertyChange, PropertyDrift, ProviderConfig, + ProvisionDeploymentGroupOperationMetadata, + ProvisionDeploymentGroupRequest, QuotaValidation, Resource, ResourceCAIInfo, @@ -96,6 +115,7 @@ TerraformVersion, UnlockDeploymentRequest, UpdateAutoMigrationConfigRequest, + UpdateDeploymentGroupRequest, UpdateDeploymentRequest, ) @@ -198,19 +218,32 @@ def _get_version(dependency_name): "ApplyResults", "AutoMigrationConfig", "ConfigClient", + "CreateDeploymentGroupRequest", "CreateDeploymentRequest", "CreatePreviewRequest", + "DeleteDeploymentGroupRequest", "DeleteDeploymentRequest", "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", + "DeploymentGroup", + "DeploymentGroupRevision", "DeploymentOperationMetadata", + "DeploymentOperationSummary", + "DeploymentSource", + "DeploymentSpec", + "DeploymentUnit", + "DeploymentUnitProgress", + "DeprovisionDeploymentGroupRequest", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "ExternalValueSource", "GetAutoMigrationConfigRequest", + "GetDeploymentGroupRequest", + "GetDeploymentGroupRevisionRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -220,6 +253,10 @@ def _get_version(dependency_name): "GetTerraformVersionRequest", "GitSource", "ImportStatefileRequest", + "ListDeploymentGroupRevisionsRequest", + "ListDeploymentGroupRevisionsResponse", + "ListDeploymentGroupsRequest", + "ListDeploymentGroupsResponse", "ListDeploymentsRequest", "ListDeploymentsResponse", "ListPreviewsRequest", @@ -244,6 +281,8 @@ def _get_version(dependency_name): "PropertyChange", "PropertyDrift", "ProviderConfig", + "ProvisionDeploymentGroupOperationMetadata", + "ProvisionDeploymentGroupRequest", "QuotaValidation", "Resource", "ResourceCAIInfo", @@ -261,5 +300,6 @@ def 
_get_version(dependency_name): "TerraformVersion", "UnlockDeploymentRequest", "UpdateAutoMigrationConfigRequest", + "UpdateDeploymentGroupRequest", "UpdateDeploymentRequest", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json index 746d54e27899..acde226fec42 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json +++ b/packages/google-cloud-config/google/cloud/config_v1/gapic_metadata.json @@ -15,6 +15,11 @@ "create_deployment" ] }, + "CreateDeploymentGroup": { + "methods": [ + "create_deployment_group" + ] + }, "CreatePreview": { "methods": [ "create_preview" @@ -25,6 +30,11 @@ "delete_deployment" ] }, + "DeleteDeploymentGroup": { + "methods": [ + "delete_deployment_group" + ] + }, "DeletePreview": { "methods": [ "delete_preview" @@ -35,6 +45,11 @@ "delete_statefile" ] }, + "DeprovisionDeploymentGroup": { + "methods": [ + "deprovision_deployment_group" + ] + }, "ExportDeploymentStatefile": { "methods": [ "export_deployment_statefile" @@ -65,6 +80,16 @@ "get_deployment" ] }, + "GetDeploymentGroup": { + "methods": [ + "get_deployment_group" + ] + }, + "GetDeploymentGroupRevision": { + "methods": [ + "get_deployment_group_revision" + ] + }, "GetPreview": { "methods": [ "get_preview" @@ -100,6 +125,16 @@ "import_statefile" ] }, + "ListDeploymentGroupRevisions": { + "methods": [ + "list_deployment_group_revisions" + ] + }, + "ListDeploymentGroups": { + "methods": [ + "list_deployment_groups" + ] + }, "ListDeployments": { "methods": [ "list_deployments" @@ -140,6 +175,11 @@ "lock_deployment" ] }, + "ProvisionDeploymentGroup": { + "methods": [ + "provision_deployment_group" + ] + }, "UnlockDeployment": { "methods": [ "unlock_deployment" @@ -154,6 +194,11 @@ "methods": [ "update_deployment" ] + }, + "UpdateDeploymentGroup": { + "methods": [ + "update_deployment_group" + ] } } }, @@ -165,6 +210,11 @@ "create_deployment" ] }, + 
"CreateDeploymentGroup": { + "methods": [ + "create_deployment_group" + ] + }, "CreatePreview": { "methods": [ "create_preview" @@ -175,6 +225,11 @@ "delete_deployment" ] }, + "DeleteDeploymentGroup": { + "methods": [ + "delete_deployment_group" + ] + }, "DeletePreview": { "methods": [ "delete_preview" @@ -185,6 +240,11 @@ "delete_statefile" ] }, + "DeprovisionDeploymentGroup": { + "methods": [ + "deprovision_deployment_group" + ] + }, "ExportDeploymentStatefile": { "methods": [ "export_deployment_statefile" @@ -215,6 +275,16 @@ "get_deployment" ] }, + "GetDeploymentGroup": { + "methods": [ + "get_deployment_group" + ] + }, + "GetDeploymentGroupRevision": { + "methods": [ + "get_deployment_group_revision" + ] + }, "GetPreview": { "methods": [ "get_preview" @@ -250,6 +320,16 @@ "import_statefile" ] }, + "ListDeploymentGroupRevisions": { + "methods": [ + "list_deployment_group_revisions" + ] + }, + "ListDeploymentGroups": { + "methods": [ + "list_deployment_groups" + ] + }, "ListDeployments": { "methods": [ "list_deployments" @@ -290,6 +370,11 @@ "lock_deployment" ] }, + "ProvisionDeploymentGroup": { + "methods": [ + "provision_deployment_group" + ] + }, "UnlockDeployment": { "methods": [ "unlock_deployment" @@ -304,6 +389,11 @@ "methods": [ "update_deployment" ] + }, + "UpdateDeploymentGroup": { + "methods": [ + "update_deployment_group" + ] } } }, @@ -315,6 +405,11 @@ "create_deployment" ] }, + "CreateDeploymentGroup": { + "methods": [ + "create_deployment_group" + ] + }, "CreatePreview": { "methods": [ "create_preview" @@ -325,6 +420,11 @@ "delete_deployment" ] }, + "DeleteDeploymentGroup": { + "methods": [ + "delete_deployment_group" + ] + }, "DeletePreview": { "methods": [ "delete_preview" @@ -335,6 +435,11 @@ "delete_statefile" ] }, + "DeprovisionDeploymentGroup": { + "methods": [ + "deprovision_deployment_group" + ] + }, "ExportDeploymentStatefile": { "methods": [ "export_deployment_statefile" @@ -365,6 +470,16 @@ "get_deployment" ] }, + "GetDeploymentGroup": 
{ + "methods": [ + "get_deployment_group" + ] + }, + "GetDeploymentGroupRevision": { + "methods": [ + "get_deployment_group_revision" + ] + }, "GetPreview": { "methods": [ "get_preview" @@ -400,6 +515,16 @@ "import_statefile" ] }, + "ListDeploymentGroupRevisions": { + "methods": [ + "list_deployment_group_revisions" + ] + }, + "ListDeploymentGroups": { + "methods": [ + "list_deployment_groups" + ] + }, "ListDeployments": { "methods": [ "list_deployments" @@ -440,6 +565,11 @@ "lock_deployment" ] }, + "ProvisionDeploymentGroup": { + "methods": [ + "provision_deployment_group" + ] + }, "UnlockDeployment": { "methods": [ "unlock_deployment" @@ -454,6 +584,11 @@ "methods": [ "update_deployment" ] + }, + "UpdateDeploymentGroup": { + "methods": [ + "update_deployment_group" + ] } } } diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py index 4e11dbd7aad0..9803e8b14b4e 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/async_client.py @@ -95,6 +95,14 @@ class ConfigAsyncClient: ) deployment_path = staticmethod(ConfigClient.deployment_path) parse_deployment_path = staticmethod(ConfigClient.parse_deployment_path) + deployment_group_path = staticmethod(ConfigClient.deployment_group_path) + parse_deployment_group_path = staticmethod(ConfigClient.parse_deployment_group_path) + deployment_group_revision_path = staticmethod( + ConfigClient.deployment_group_revision_path + ) + parse_deployment_group_revision_path = staticmethod( + ConfigClient.parse_deployment_group_revision_path + ) preview_path = staticmethod(ConfigClient.preview_path) parse_preview_path = staticmethod(ConfigClient.parse_preview_path) resource_path = staticmethod(ConfigClient.resource_path) @@ -3805,6 +3813,1185 @@ async def sample_update_auto_migration_config(): # 
Done; return the response. return response + async def get_deployment_group( + self, + request: Optional[Union[config.GetDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.DeploymentGroup: + r"""Get a DeploymentGroup for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetDeploymentGroupRequest, dict]]): + The request object. The request message for the + GetDeploymentGroup method. + name (:class:`str`): + Required. The name of the deployment group to retrieve. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.DeploymentGroup: + A DeploymentGroup is a collection of + DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetDeploymentGroupRequest): + request = config.GetDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def create_deployment_group( + self, + request: Optional[Union[config.CreateDeploymentGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment_group: Optional[config.DeploymentGroup] = None, + deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] The + newly created DeploymentGroup will be in the ``CREATING`` state + and can be retrieved via Get and List calls. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_create_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) + + # Make the request + operation = client.create_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.CreateDeploymentGroupRequest, dict]]): + The request object. A request to create a deployment + group + parent (:class:`str`): + Required. The parent in whose context the Deployment + Group is created. 
The parent value is in the format: + 'projects/{project_id}/locations/{location}' + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_group (:class:`google.cloud.config_v1.types.DeploymentGroup`): + Required. [Deployment Group][] resource to create + This corresponds to the ``deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_group_id (:class:`str`): + Required. The deployment group ID. + This corresponds to the ``deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, deployment_group, deployment_group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.CreateDeploymentGroupRequest): + request = config.CreateDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment_group is not None: + request.deployment_group = deployment_group + if deployment_group_id is not None: + request.deployment_group_id = deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def update_deployment_group( + self, + request: Optional[Union[config.UpdateDeploymentGroupRequest, dict]] = None, + *, + deployment_group: Optional[config.DeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_update_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.UpdateDeploymentGroupRequest( + ) + + # Make the request + operation = client.update_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.UpdateDeploymentGroupRequest, dict]]): + The request object. A request message for updating a + deployment group + deployment_group (:class:`google.cloud.config_v1.types.DeploymentGroup`): + Required. + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + to update. + + The deployment group's ``name`` field is used to + identify the resource to be updated. Format: + ``projects/{project}/locations/{location}/deploymentGroups/{deployment_group_id}`` + + This corresponds to the ``deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment Group resource by the + update. + + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. 
+ + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.UpdateDeploymentGroupRequest): + request = config.UpdateDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment_group is not None: + request.deployment_group = deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment_group.name", request.deployment_group.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def delete_deployment_group( + self, + request: Optional[Union[config.DeleteDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_delete_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeleteDeploymentGroupRequest, dict]]): + The request object. Request message for Delete + DeploymentGroup + name (:class:`str`): + Required. The name of DeploymentGroup in the format + projects/{project_id}/locations/{location_id}/deploymentGroups/{deploymentGroup} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.DeleteDeploymentGroupRequest): + request = config.DeleteDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def list_deployment_groups( + self, + request: Optional[Union[config.ListDeploymentGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeploymentGroupsAsyncPager: + r"""List DeploymentGroups for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_deployment_groups(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListDeploymentGroupsRequest, dict]]): + The request object. The request message for the + ListDeploymentGroups method. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployment groups. Format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentGroupsAsyncPager: + The response message for the + ListDeploymentGroups method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ListDeploymentGroupsRequest): + request = config.ListDeploymentGroupsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deployment_groups + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentGroupsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def provision_deployment_group( + self, + request: Optional[Union[config.ProvisionDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Provisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_provision_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.provision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ProvisionDeploymentGroupRequest, dict]]): + The request object. The request message for the + ProvisionDeploymentGroup method. + name (:class:`str`): + Required. The name of the deployment group to provision. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ProvisionDeploymentGroupRequest): + request = config.ProvisionDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.provision_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def deprovision_deployment_group( + self, + request: Optional[Union[config.DeprovisionDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deprovisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_deprovision_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.deprovision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.DeprovisionDeploymentGroupRequest, dict]]): + The request object. The request message for the + DeprovisionDeploymentGroup method. 
+ name (:class:`str`): + Required. The name of the deployment group to + deprovision. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.DeprovisionDeploymentGroupRequest): + request = config.DeprovisionDeploymentGroupRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.deprovision_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + async def get_deployment_group_revision( + self, + request: Optional[Union[config.GetDeploymentGroupRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.DeploymentGroupRevision: + r"""Gets details about a + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_get_deployment_group_revision(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_group_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.GetDeploymentGroupRevisionRequest, dict]]): + The request object. The request message for the + GetDeploymentGroupRevision method. + name (:class:`str`): + Required. The name of the deployment group revision to + retrieve. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.DeploymentGroupRevision: + A DeploymentGroupRevision represents a snapshot of a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + at a given point in time, created when a + DeploymentGroup is provisioned or deprovisioned. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetDeploymentGroupRevisionRequest): + request = config.GetDeploymentGroupRevisionRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_deployment_group_revision + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_deployment_group_revisions( + self, + request: Optional[ + Union[config.ListDeploymentGroupRevisionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeploymentGroupRevisionsAsyncPager: + r"""Lists + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]s + in a given + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + async def sample_list_deployment_group_revisions(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_group_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest, dict]]): + The request object. The request message for the + ListDeploymentGroupRevisions method. + parent (:class:`str`): + Required. The parent, which owns this collection of + deployment group revisions. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentGroupRevisionsAsyncPager: + The response message for the + ListDeploymentGroupRevisions method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ListDeploymentGroupRevisionsRequest): + request = config.ListDeploymentGroupRevisionsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_deployment_group_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListDeploymentGroupRevisionsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + async def list_operations( self, request: Optional[Union[operations_pb2.ListOperationsRequest, dict]] = None, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py index 57559a5bbf07..e184d24fcc8e 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/client.py @@ -286,6 +286,52 @@ def parse_deployment_path(path: str) -> Dict[str, str]: ) return m.groupdict() if m else {} + @staticmethod + def deployment_group_path( + project: str, + location: str, + deployment_group: str, + ) -> str: + """Returns a fully-qualified deployment_group string.""" + return "projects/{project}/locations/{location}/deploymentGroups/{deployment_group}".format( + project=project, + location=location, + deployment_group=deployment_group, + ) + + @staticmethod + def parse_deployment_group_path(path: str) -> Dict[str, str]: + """Parses a deployment_group path into its component segments.""" + m = re.match( + r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/deploymentGroups/(?P<deployment_group>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def deployment_group_revision_path( + project: str, + location: str, + deployment_group: str, + 
revision: str, + ) -> str: + """Returns a fully-qualified deployment_group_revision string.""" + return "projects/{project}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}".format( + project=project, + location=location, + deployment_group=deployment_group, + revision=revision, + ) + + @staticmethod + def parse_deployment_group_revision_path(path: str) -> Dict[str, str]: + """Parses a deployment_group_revision path into its component segments.""" + m = re.match( + r"^projects/(?P.+?)/locations/(?P.+?)/deploymentGroups/(?P.+?)/revisions/(?P.+?)$", + path, + ) + return m.groupdict() if m else {} + @staticmethod def preview_path( project: str, @@ -4356,6 +4402,1166 @@ def sample_update_auto_migration_config(): # Done; return the response. return response + def get_deployment_group( + self, + request: Optional[Union[config.GetDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.DeploymentGroup: + r"""Get a DeploymentGroup for a given project and + location. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_group(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetDeploymentGroupRequest, dict]): + The request object. The request message for the + GetDeploymentGroup method. + name (str): + Required. The name of the deployment group to retrieve. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.DeploymentGroup: + A DeploymentGroup is a collection of + DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetDeploymentGroupRequest): + request = config.GetDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_deployment_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def create_deployment_group( + self, + request: Optional[Union[config.CreateDeploymentGroupRequest, dict]] = None, + *, + parent: Optional[str] = None, + deployment_group: Optional[config.DeploymentGroup] = None, + deployment_group_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] The + newly created DeploymentGroup will be in the ``CREATING`` state + and can be retrieved via Get and List calls. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_create_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) + + # Make the request + operation = client.create_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.CreateDeploymentGroupRequest, dict]): + The request object. A request to create a deployment + group + parent (str): + Required. The parent in whose context the Deployment + Group is created. The parent value is in the format: + 'projects/{project_id}/locations/{location}' + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_group (google.cloud.config_v1.types.DeploymentGroup): + Required. [Deployment Group][] resource to create + This corresponds to the ``deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + deployment_group_id (str): + Required. The deployment group ID. + This corresponds to the ``deployment_group_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. 
+ timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, deployment_group, deployment_group_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.CreateDeploymentGroupRequest): + request = config.CreateDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if deployment_group is not None: + request.deployment_group = deployment_group + if deployment_group_id is not None: + request.deployment_group_id = deployment_group_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_deployment_group] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def update_deployment_group( + self, + request: Optional[Union[config.UpdateDeploymentGroupRequest, dict]] = None, + *, + deployment_group: Optional[config.DeploymentGroup] = None, + update_mask: Optional[field_mask_pb2.FieldMask] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_update_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UpdateDeploymentGroupRequest( + ) + + # Make the request + operation = client.update_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.UpdateDeploymentGroupRequest, dict]): + The request object. A request message for updating a + deployment group + deployment_group (google.cloud.config_v1.types.DeploymentGroup): + Required. + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + to update. + + The deployment group's ``name`` field is used to + identify the resource to be updated. Format: + ``projects/{project}/locations/{location}/deploymentGroups/{deployment_group_id}`` + + This corresponds to the ``deployment_group`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment Group resource by the + update. + + The fields specified in the update_mask are relative to + the resource, not the full request. A field will be + overwritten if it is in the mask. If the user does not + provide a mask then all fields will be overwritten. + + This corresponds to the ``update_mask`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [deployment_group, update_mask] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.UpdateDeploymentGroupRequest): + request = config.UpdateDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if deployment_group is not None: + request.deployment_group = deployment_group + if update_mask is not None: + request.update_mask = update_mask + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.update_deployment_group] + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("deployment_group.name", request.deployment_group.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_deployment_group( + self, + request: Optional[Union[config.DeleteDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_delete_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.DeleteDeploymentGroupRequest, dict]): + The request object. Request message for Delete + DeploymentGroup + name (str): + Required. The name of DeploymentGroup in the format + projects/{project_id}/locations/{location_id}/deploymentGroups/{deploymentGroup} + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.DeleteDeploymentGroupRequest): + request = config.DeleteDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_deployment_group] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_deployment_groups( + self, + request: Optional[Union[config.ListDeploymentGroupsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeploymentGroupsPager: + r"""List DeploymentGroups for a given project and + location. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_deployment_groups(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListDeploymentGroupsRequest, dict]): + The request object. The request message for the + ListDeploymentGroups method. + parent (str): + Required. The parent, which owns this collection of + deployment groups. Format: + 'projects/{project_id}/locations/{location}'. + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentGroupsPager: + The response message for the + ListDeploymentGroups method. + Iterating over this object will yield + results and resolve additional pages + automatically. 
+ + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ListDeploymentGroupsRequest): + request = config.ListDeploymentGroupsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_deployment_groups] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentGroupsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def provision_deployment_group( + self, + request: Optional[Union[config.ProvisionDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Provisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_provision_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.provision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ProvisionDeploymentGroupRequest, dict]): + The request object. The request message for the + ProvisionDeploymentGroup method. + name (str): + Required. The name of the deployment group to provision. 
+ Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ProvisionDeploymentGroupRequest): + request = config.ProvisionDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.provision_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. + return response + + def deprovision_deployment_group( + self, + request: Optional[Union[config.DeprovisionDeploymentGroupRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deprovisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_deprovision_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.deprovision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.DeprovisionDeploymentGroupRequest, dict]): + The request object. The request message for the + DeprovisionDeploymentGroup method. + name (str): + Required. The name of the deployment group to + deprovision. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.cloud.config_v1.types.DeploymentGroup` A DeploymentGroup is a collection of DeploymentUnits that in a DAG-like + structure. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.DeprovisionDeploymentGroupRequest): + request = config.DeprovisionDeploymentGroupRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.deprovision_deployment_group + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + config.DeploymentGroup, + metadata_type=config.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + def get_deployment_group_revision( + self, + request: Optional[Union[config.GetDeploymentGroupRevisionRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.DeploymentGroupRevision: + r"""Gets details about a + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_get_deployment_group_revision(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_group_revision(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.config_v1.types.GetDeploymentGroupRevisionRequest, dict]): + The request object. The request message for the + GetDeploymentGroupRevision method. + name (str): + Required. The name of the deployment group revision to + retrieve. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}'. + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.types.DeploymentGroupRevision: + A DeploymentGroupRevision represents a snapshot of a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + at a given point in time, created when a + DeploymentGroup is provisioned or deprovisioned. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.GetDeploymentGroupRevisionRequest): + request = config.GetDeploymentGroupRevisionRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.get_deployment_group_revision + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_deployment_group_revisions( + self, + request: Optional[ + Union[config.ListDeploymentGroupRevisionsRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListDeploymentGroupRevisionsPager: + r"""Lists + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]s + in a given + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup]. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import config_v1 + + def sample_list_deployment_group_revisions(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_group_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest, dict]): + The request object. The request message for the + ListDeploymentGroupRevisions method. + parent (str): + Required. The parent, which owns this collection of + deployment group revisions. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. 
+ + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.config_v1.services.config.pagers.ListDeploymentGroupRevisionsPager: + The response message for the + ListDeploymentGroupRevisions method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, config.ListDeploymentGroupRevisionsRequest): + request = config.ListDeploymentGroupRevisionsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[ + self._transport.list_deployment_group_revisions + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListDeploymentGroupRevisionsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + def __enter__(self) -> "ConfigClient": return self diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py index 06aaa046b659..5f22454fd153 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/pagers.py @@ -1131,3 +1131,315 @@ async def async_generator(): def __repr__(self) -> str: return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentGroupsPager: + """A pager for iterating through ``list_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentGroupsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployment_groups`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeploymentGroups`` requests and continue to iterate + through the ``deployment_groups`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.config_v1.types.ListDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., config.ListDeploymentGroupsResponse], + request: config.ListDeploymentGroupsRequest, + response: config.ListDeploymentGroupsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentGroupsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = config.ListDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[config.DeploymentGroup]: + for page in self.pages: + yield from page.deployment_groups + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentGroupsAsyncPager: + """A pager for iterating through ``list_deployment_groups`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentGroupsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployment_groups`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeploymentGroups`` requests and continue to iterate + through the ``deployment_groups`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListDeploymentGroupsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListDeploymentGroupsResponse]], + request: config.ListDeploymentGroupsRequest, + response: config.ListDeploymentGroupsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentGroupsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentGroupsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = config.ListDeploymentGroupsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListDeploymentGroupsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.DeploymentGroup]: + async def async_generator(): + async for page in self.pages: + for response in page.deployment_groups: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentGroupRevisionsPager: + """A pager for iterating through ``list_deployment_group_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``deployment_group_revisions`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListDeploymentGroupRevisions`` requests and continue to iterate + through the ``deployment_group_revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., config.ListDeploymentGroupRevisionsResponse], + request: config.ListDeploymentGroupRevisionsRequest, + response: config.ListDeploymentGroupRevisionsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = config.ListDeploymentGroupRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[config.ListDeploymentGroupRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[config.DeploymentGroupRevision]: + for page in self.pages: + yield from page.deployment_group_revisions + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListDeploymentGroupRevisionsAsyncPager: + """A pager for iterating through ``list_deployment_group_revisions`` requests. + + This class thinly wraps an initial + :class:`google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``deployment_group_revisions`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListDeploymentGroupRevisions`` requests and continue to iterate + through the ``deployment_group_revisions`` field on the + corresponding responses. + + All the usual :class:`google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[..., Awaitable[config.ListDeploymentGroupRevisionsResponse]], + request: config.ListDeploymentGroupRevisionsRequest, + response: config.ListDeploymentGroupRevisionsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest): + The initial request object. + response (google.cloud.config_v1.types.ListDeploymentGroupRevisionsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = config.ListDeploymentGroupRevisionsRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[config.ListDeploymentGroupRevisionsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[config.DeploymentGroupRevision]: + async def async_generator(): + async for page in self.pages: + for response in page.deployment_group_revisions: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py index 4059497f0267..9ad28e561882 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/base.py @@ -294,6 +294,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_deployment_group: gapic_v1.method.wrap_method( + self.get_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_deployment_group: gapic_v1.method.wrap_method( + self.create_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_deployment_group: gapic_v1.method.wrap_method( + self.update_deployment_group, + default_timeout=None, + client_info=client_info, + ), + 
self.delete_deployment_group: gapic_v1.method.wrap_method( + self.delete_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_deployment_groups: gapic_v1.method.wrap_method( + self.list_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.provision_deployment_group: gapic_v1.method.wrap_method( + self.provision_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.deprovision_deployment_group: gapic_v1.method.wrap_method( + self.deprovision_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment_group_revision: gapic_v1.method.wrap_method( + self.get_deployment_group_revision, + default_timeout=None, + client_info=client_info, + ), + self.list_deployment_group_revisions: gapic_v1.method.wrap_method( + self.list_deployment_group_revisions, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -627,6 +672,95 @@ def update_auto_migration_config( ]: raise NotImplementedError() + @property + def get_deployment_group( + self, + ) -> Callable[ + [config.GetDeploymentGroupRequest], + Union[config.DeploymentGroup, Awaitable[config.DeploymentGroup]], + ]: + raise NotImplementedError() + + @property + def create_deployment_group( + self, + ) -> Callable[ + [config.CreateDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def update_deployment_group( + self, + ) -> Callable[ + [config.UpdateDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def delete_deployment_group( + self, + ) -> Callable[ + [config.DeleteDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def 
list_deployment_groups( + self, + ) -> Callable[ + [config.ListDeploymentGroupsRequest], + Union[ + config.ListDeploymentGroupsResponse, + Awaitable[config.ListDeploymentGroupsResponse], + ], + ]: + raise NotImplementedError() + + @property + def provision_deployment_group( + self, + ) -> Callable[ + [config.ProvisionDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def deprovision_deployment_group( + self, + ) -> Callable[ + [config.DeprovisionDeploymentGroupRequest], + Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]], + ]: + raise NotImplementedError() + + @property + def get_deployment_group_revision( + self, + ) -> Callable[ + [config.GetDeploymentGroupRevisionRequest], + Union[ + config.DeploymentGroupRevision, Awaitable[config.DeploymentGroupRevision] + ], + ]: + raise NotImplementedError() + + @property + def list_deployment_group_revisions( + self, + ) -> Callable[ + [config.ListDeploymentGroupRevisionsRequest], + Union[ + config.ListDeploymentGroupRevisionsResponse, + Awaitable[config.ListDeploymentGroupRevisionsResponse], + ], + ]: + raise NotImplementedError() + @property def list_operations( self, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py index 2cf2d8aff6a3..15337e94128a 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc.py @@ -1125,6 +1125,285 @@ def update_auto_migration_config( ) return self._stubs["update_auto_migration_config"] + @property + def get_deployment_group( + self, + ) -> Callable[[config.GetDeploymentGroupRequest], config.DeploymentGroup]: + r"""Return a callable for the get deployment group method over gRPC. 
+ + Get a DeploymentGroup for a given project and + location. + + Returns: + Callable[[~.GetDeploymentGroupRequest], + ~.DeploymentGroup]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment_group" not in self._stubs: + self._stubs["get_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeploymentGroup", + request_serializer=config.GetDeploymentGroupRequest.serialize, + response_deserializer=config.DeploymentGroup.deserialize, + ) + return self._stubs["get_deployment_group"] + + @property + def create_deployment_group( + self, + ) -> Callable[[config.CreateDeploymentGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the create deployment group method over gRPC. + + Creates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] The + newly created DeploymentGroup will be in the ``CREATING`` state + and can be retrieved via Get and List calls. + + Returns: + Callable[[~.CreateDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_deployment_group" not in self._stubs: + self._stubs["create_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/CreateDeploymentGroup", + request_serializer=config.CreateDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment_group"] + + @property + def update_deployment_group( + self, + ) -> Callable[[config.UpdateDeploymentGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the update deployment group method over gRPC. + + Updates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + Returns: + Callable[[~.UpdateDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deployment_group" not in self._stubs: + self._stubs["update_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateDeploymentGroup", + request_serializer=config.UpdateDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deployment_group"] + + @property + def delete_deployment_group( + self, + ) -> Callable[[config.DeleteDeploymentGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the delete deployment group method over gRPC. + + Deletes a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + Returns: + Callable[[~.DeleteDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "delete_deployment_group" not in self._stubs: + self._stubs["delete_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteDeploymentGroup", + request_serializer=config.DeleteDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment_group"] + + @property + def list_deployment_groups( + self, + ) -> Callable[ + [config.ListDeploymentGroupsRequest], config.ListDeploymentGroupsResponse + ]: + r"""Return a callable for the list deployment groups method over gRPC. + + List DeploymentGroups for a given project and + location. + + Returns: + Callable[[~.ListDeploymentGroupsRequest], + ~.ListDeploymentGroupsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployment_groups" not in self._stubs: + self._stubs["list_deployment_groups"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeploymentGroups", + request_serializer=config.ListDeploymentGroupsRequest.serialize, + response_deserializer=config.ListDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_deployment_groups"] + + @property + def provision_deployment_group( + self, + ) -> Callable[[config.ProvisionDeploymentGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the provision deployment group method over gRPC. + + Provisions a deployment group. 
+ + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + Returns: + Callable[[~.ProvisionDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "provision_deployment_group" not in self._stubs: + self._stubs["provision_deployment_group"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ProvisionDeploymentGroup", + request_serializer=config.ProvisionDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + ) + return self._stubs["provision_deployment_group"] + + @property + def deprovision_deployment_group( + self, + ) -> Callable[[config.DeprovisionDeploymentGroupRequest], operations_pb2.Operation]: + r"""Return a callable for the deprovision deployment group method over gRPC. + + Deprovisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. 
+ + Returns: + Callable[[~.DeprovisionDeploymentGroupRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "deprovision_deployment_group" not in self._stubs: + self._stubs["deprovision_deployment_group"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/DeprovisionDeploymentGroup", + request_serializer=config.DeprovisionDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + ) + return self._stubs["deprovision_deployment_group"] + + @property + def get_deployment_group_revision( + self, + ) -> Callable[ + [config.GetDeploymentGroupRevisionRequest], config.DeploymentGroupRevision + ]: + r"""Return a callable for the get deployment group revision method over gRPC. + + Gets details about a + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]. + + Returns: + Callable[[~.GetDeploymentGroupRevisionRequest], + ~.DeploymentGroupRevision]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deployment_group_revision" not in self._stubs: + self._stubs["get_deployment_group_revision"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeploymentGroupRevision", + request_serializer=config.GetDeploymentGroupRevisionRequest.serialize, + response_deserializer=config.DeploymentGroupRevision.deserialize, + ) + ) + return self._stubs["get_deployment_group_revision"] + + @property + def list_deployment_group_revisions( + self, + ) -> Callable[ + [config.ListDeploymentGroupRevisionsRequest], + config.ListDeploymentGroupRevisionsResponse, + ]: + r"""Return a callable for the list deployment group + revisions method over gRPC. + + Lists + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]s + in a given + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup]. + + Returns: + Callable[[~.ListDeploymentGroupRevisionsRequest], + ~.ListDeploymentGroupRevisionsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_deployment_group_revisions" not in self._stubs: + self._stubs["list_deployment_group_revisions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeploymentGroupRevisions", + request_serializer=config.ListDeploymentGroupRevisionsRequest.serialize, + response_deserializer=config.ListDeploymentGroupRevisionsResponse.deserialize, + ) + ) + return self._stubs["list_deployment_group_revisions"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py index 5d4daf465a81..a8955b250379 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/grpc_asyncio.py @@ -1164,6 +1164,299 @@ def update_auto_migration_config( ) return self._stubs["update_auto_migration_config"] + @property + def get_deployment_group( + self, + ) -> Callable[ + [config.GetDeploymentGroupRequest], Awaitable[config.DeploymentGroup] + ]: + r"""Return a callable for the get deployment group method over gRPC. + + Get a DeploymentGroup for a given project and + location. + + Returns: + Callable[[~.GetDeploymentGroupRequest], + Awaitable[~.DeploymentGroup]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "get_deployment_group" not in self._stubs: + self._stubs["get_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeploymentGroup", + request_serializer=config.GetDeploymentGroupRequest.serialize, + response_deserializer=config.DeploymentGroup.deserialize, + ) + return self._stubs["get_deployment_group"] + + @property + def create_deployment_group( + self, + ) -> Callable[ + [config.CreateDeploymentGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the create deployment group method over gRPC. + + Creates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] The + newly created DeploymentGroup will be in the ``CREATING`` state + and can be retrieved via Get and List calls. + + Returns: + Callable[[~.CreateDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_deployment_group" not in self._stubs: + self._stubs["create_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/CreateDeploymentGroup", + request_serializer=config.CreateDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_deployment_group"] + + @property + def update_deployment_group( + self, + ) -> Callable[ + [config.UpdateDeploymentGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the update deployment group method over gRPC. + + Updates a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + Returns: + Callable[[~.UpdateDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_deployment_group" not in self._stubs: + self._stubs["update_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/UpdateDeploymentGroup", + request_serializer=config.UpdateDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_deployment_group"] + + @property + def delete_deployment_group( + self, + ) -> Callable[ + [config.DeleteDeploymentGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the delete deployment group method over gRPC. + + Deletes a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + + Returns: + Callable[[~.DeleteDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_deployment_group" not in self._stubs: + self._stubs["delete_deployment_group"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/DeleteDeploymentGroup", + request_serializer=config.DeleteDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_deployment_group"] + + @property + def list_deployment_groups( + self, + ) -> Callable[ + [config.ListDeploymentGroupsRequest], + Awaitable[config.ListDeploymentGroupsResponse], + ]: + r"""Return a callable for the list deployment groups method over gRPC. + + List DeploymentGroups for a given project and + location. 
+ + Returns: + Callable[[~.ListDeploymentGroupsRequest], + Awaitable[~.ListDeploymentGroupsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployment_groups" not in self._stubs: + self._stubs["list_deployment_groups"] = self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeploymentGroups", + request_serializer=config.ListDeploymentGroupsRequest.serialize, + response_deserializer=config.ListDeploymentGroupsResponse.deserialize, + ) + return self._stubs["list_deployment_groups"] + + @property + def provision_deployment_group( + self, + ) -> Callable[ + [config.ProvisionDeploymentGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the provision deployment group method over gRPC. + + Provisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + Returns: + Callable[[~.ProvisionDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "provision_deployment_group" not in self._stubs: + self._stubs["provision_deployment_group"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ProvisionDeploymentGroup", + request_serializer=config.ProvisionDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + ) + return self._stubs["provision_deployment_group"] + + @property + def deprovision_deployment_group( + self, + ) -> Callable[ + [config.DeprovisionDeploymentGroupRequest], Awaitable[operations_pb2.Operation] + ]: + r"""Return a callable for the deprovision deployment group method over gRPC. + + Deprovisions a deployment group. + + NOTE: As a first step of this operation, Infra Manager will + automatically delete any Deployments that were part of the *last + successful* + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision] + but are *no longer* included in the *current* + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + definition (e.g., following an ``UpdateDeploymentGroup`` call), + along with their actuated resources. + + Returns: + Callable[[~.DeprovisionDeploymentGroupRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "deprovision_deployment_group" not in self._stubs: + self._stubs["deprovision_deployment_group"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/DeprovisionDeploymentGroup", + request_serializer=config.DeprovisionDeploymentGroupRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + ) + return self._stubs["deprovision_deployment_group"] + + @property + def get_deployment_group_revision( + self, + ) -> Callable[ + [config.GetDeploymentGroupRevisionRequest], + Awaitable[config.DeploymentGroupRevision], + ]: + r"""Return a callable for the get deployment group revision method over gRPC. + + Gets details about a + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]. + + Returns: + Callable[[~.GetDeploymentGroupRevisionRequest], + Awaitable[~.DeploymentGroupRevision]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_deployment_group_revision" not in self._stubs: + self._stubs["get_deployment_group_revision"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/GetDeploymentGroupRevision", + request_serializer=config.GetDeploymentGroupRevisionRequest.serialize, + response_deserializer=config.DeploymentGroupRevision.deserialize, + ) + ) + return self._stubs["get_deployment_group_revision"] + + @property + def list_deployment_group_revisions( + self, + ) -> Callable[ + [config.ListDeploymentGroupRevisionsRequest], + Awaitable[config.ListDeploymentGroupRevisionsResponse], + ]: + r"""Return a callable for the list deployment group + revisions method over gRPC. + + Lists + [DeploymentGroupRevision][google.cloud.config.v1.DeploymentGroupRevision]s + in a given + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup]. 
+ + Returns: + Callable[[~.ListDeploymentGroupRevisionsRequest], + Awaitable[~.ListDeploymentGroupRevisionsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "list_deployment_group_revisions" not in self._stubs: + self._stubs["list_deployment_group_revisions"] = ( + self._logged_channel.unary_unary( + "/google.cloud.config.v1.Config/ListDeploymentGroupRevisions", + request_serializer=config.ListDeploymentGroupRevisionsRequest.serialize, + response_deserializer=config.ListDeploymentGroupRevisionsResponse.deserialize, + ) + ) + return self._stubs["list_deployment_group_revisions"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -1312,6 +1605,51 @@ def _prep_wrapped_messages(self, client_info): default_timeout=None, client_info=client_info, ), + self.get_deployment_group: self._wrap_method( + self.get_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.create_deployment_group: self._wrap_method( + self.create_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.update_deployment_group: self._wrap_method( + self.update_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.delete_deployment_group: self._wrap_method( + self.delete_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.list_deployment_groups: self._wrap_method( + self.list_deployment_groups, + default_timeout=None, + client_info=client_info, + ), + self.provision_deployment_group: self._wrap_method( + self.provision_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.deprovision_deployment_group: 
self._wrap_method( + self.deprovision_deployment_group, + default_timeout=None, + client_info=client_info, + ), + self.get_deployment_group_revision: self._wrap_method( + self.get_deployment_group_revision, + default_timeout=None, + client_info=client_info, + ), + self.list_deployment_group_revisions: self._wrap_method( + self.list_deployment_group_revisions, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py index c07961a3d7f5..63229fb1d76b 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest.py @@ -87,6 +87,14 @@ def post_create_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_create_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_create_preview(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -103,6 +111,14 @@ def post_delete_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_delete_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_delete_preview(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -115,6 +131,14 @@ def pre_delete_statefile(self, request, metadata): 
logging.log(f"Received request: {request}") return request, metadata + def pre_deprovision_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_deprovision_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_export_deployment_statefile(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -163,6 +187,22 @@ def post_get_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_get_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_deployment_group_revision(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_deployment_group_revision(self, response): + logging.log(f"Received response: {response}") + return response + def pre_get_preview(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -219,6 +259,22 @@ def post_import_statefile(self, response): logging.log(f"Received response: {response}") return response + def pre_list_deployment_group_revisions(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployment_group_revisions(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_deployment_groups(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_deployment_groups(self, response): + logging.log(f"Received response: {response}") + return response + def pre_list_deployments(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -283,6 
+339,14 @@ def post_lock_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_provision_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_provision_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + def pre_unlock_deployment(self, request, metadata): logging.log(f"Received request: {request}") return request, metadata @@ -307,6 +371,14 @@ def post_update_deployment(self, response): logging.log(f"Received response: {response}") return response + def pre_update_deployment_group(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_deployment_group(self, response): + logging.log(f"Received response: {response}") + return response + transport = ConfigRestTransport(interceptor=MyCustomConfigInterceptor()) client = ConfigClient(transport=transport) @@ -359,6 +431,54 @@ def post_create_deployment_with_metadata( """ return response, metadata + def pre_create_deployment_group( + self, + request: config.CreateDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.CreateDeploymentGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for create_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_create_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_deployment_group + + DEPRECATED. Please use the `post_create_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. 
This `post_create_deployment_group` interceptor runs + before the `post_create_deployment_group_with_metadata` interceptor. + """ + return response + + def post_create_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_create_deployment_group_with_metadata` + interceptor in new development instead of the `post_create_deployment_group` interceptor. + When both interceptors are used, this `post_create_deployment_group_with_metadata` interceptor runs after the + `post_create_deployment_group` interceptor. The (possibly modified) response returned by + `post_create_deployment_group` will be passed to + `post_create_deployment_group_with_metadata`. + """ + return response, metadata + def pre_create_preview( self, request: config.CreatePreviewRequest, @@ -451,6 +571,54 @@ def post_delete_deployment_with_metadata( """ return response, metadata + def pre_delete_deployment_group( + self, + request: config.DeleteDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.DeleteDeploymentGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for delete_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_delete_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_deployment_group + + DEPRECATED. Please use the `post_delete_deployment_group_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_delete_deployment_group` interceptor runs + before the `post_delete_deployment_group_with_metadata` interceptor. + """ + return response + + def post_delete_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_delete_deployment_group_with_metadata` + interceptor in new development instead of the `post_delete_deployment_group` interceptor. + When both interceptors are used, this `post_delete_deployment_group_with_metadata` interceptor runs after the + `post_delete_deployment_group` interceptor. The (possibly modified) response returned by + `post_delete_deployment_group` will be passed to + `post_delete_deployment_group_with_metadata`. + """ + return response, metadata + def pre_delete_preview( self, request: config.DeletePreviewRequest, @@ -509,6 +677,55 @@ def pre_delete_statefile( """ return request, metadata + def pre_deprovision_deployment_group( + self, + request: config.DeprovisionDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.DeprovisionDeploymentGroupRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for deprovision_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_deprovision_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for deprovision_deployment_group + + DEPRECATED. Please use the `post_deprovision_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_deprovision_deployment_group` interceptor runs + before the `post_deprovision_deployment_group_with_metadata` interceptor. + """ + return response + + def post_deprovision_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for deprovision_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_deprovision_deployment_group_with_metadata` + interceptor in new development instead of the `post_deprovision_deployment_group` interceptor. + When both interceptors are used, this `post_deprovision_deployment_group_with_metadata` interceptor runs after the + `post_deprovision_deployment_group` interceptor. The (possibly modified) response returned by + `post_deprovision_deployment_group` will be passed to + `post_deprovision_deployment_group_with_metadata`. 
+ """ + return response, metadata + def pre_export_deployment_statefile( self, request: config.ExportDeploymentStatefileRequest, @@ -791,6 +1008,103 @@ def post_get_deployment_with_metadata( """ return response, metadata + def pre_get_deployment_group( + self, + request: config.GetDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.GetDeploymentGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_deployment_group( + self, response: config.DeploymentGroup + ) -> config.DeploymentGroup: + """Post-rpc interceptor for get_deployment_group + + DEPRECATED. Please use the `post_get_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_get_deployment_group` interceptor runs + before the `post_get_deployment_group_with_metadata` interceptor. + """ + return response + + def post_get_deployment_group_with_metadata( + self, + response: config.DeploymentGroup, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.DeploymentGroup, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_deployment_group_with_metadata` + interceptor in new development instead of the `post_get_deployment_group` interceptor. + When both interceptors are used, this `post_get_deployment_group_with_metadata` interceptor runs after the + `post_get_deployment_group` interceptor. 
The (possibly modified) response returned by + `post_get_deployment_group` will be passed to + `post_get_deployment_group_with_metadata`. + """ + return response, metadata + + def pre_get_deployment_group_revision( + self, + request: config.GetDeploymentGroupRevisionRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.GetDeploymentGroupRevisionRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_deployment_group_revision + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_get_deployment_group_revision( + self, response: config.DeploymentGroupRevision + ) -> config.DeploymentGroupRevision: + """Post-rpc interceptor for get_deployment_group_revision + + DEPRECATED. Please use the `post_get_deployment_group_revision_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_get_deployment_group_revision` interceptor runs + before the `post_get_deployment_group_revision_with_metadata` interceptor. + """ + return response + + def post_get_deployment_group_revision_with_metadata( + self, + response: config.DeploymentGroupRevision, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.DeploymentGroupRevision, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for get_deployment_group_revision + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_get_deployment_group_revision_with_metadata` + interceptor in new development instead of the `post_get_deployment_group_revision` interceptor. 
+ When both interceptors are used, this `post_get_deployment_group_revision_with_metadata` interceptor runs after the + `post_get_deployment_group_revision` interceptor. The (possibly modified) response returned by + `post_get_deployment_group_revision` will be passed to + `post_get_deployment_group_revision_with_metadata`. + """ + return response, metadata + def pre_get_preview( self, request: config.GetPreviewRequest, @@ -1109,99 +1423,201 @@ def post_import_statefile_with_metadata( """ return response, metadata - def pre_list_deployments( + def pre_list_deployment_group_revisions( self, - request: config.ListDeploymentsRequest, + request: config.ListDeploymentGroupRevisionsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[config.ListDeploymentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_deployments + ) -> Tuple[ + config.ListDeploymentGroupRevisionsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_deployment_group_revisions Override in a subclass to manipulate the request or metadata before they are sent to the Config server. """ return request, metadata - def post_list_deployments( - self, response: config.ListDeploymentsResponse - ) -> config.ListDeploymentsResponse: - """Post-rpc interceptor for list_deployments + def post_list_deployment_group_revisions( + self, response: config.ListDeploymentGroupRevisionsResponse + ) -> config.ListDeploymentGroupRevisionsResponse: + """Post-rpc interceptor for list_deployment_group_revisions - DEPRECATED. Please use the `post_list_deployments_with_metadata` + DEPRECATED. Please use the `post_list_deployment_group_revisions_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. This `post_list_deployments` interceptor runs - before the `post_list_deployments_with_metadata` interceptor. 
+ it is returned to user code. This `post_list_deployment_group_revisions` interceptor runs + before the `post_list_deployment_group_revisions_with_metadata` interceptor. """ return response - def post_list_deployments_with_metadata( + def post_list_deployment_group_revisions_with_metadata( self, - response: config.ListDeploymentsResponse, + response: config.ListDeploymentGroupRevisionsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[config.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_deployments + ) -> Tuple[ + config.ListDeploymentGroupRevisionsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_deployment_group_revisions Override in a subclass to read or manipulate the response or metadata after it is returned by the Config server but before it is returned to user code. - We recommend only using this `post_list_deployments_with_metadata` - interceptor in new development instead of the `post_list_deployments` interceptor. - When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the - `post_list_deployments` interceptor. The (possibly modified) response returned by - `post_list_deployments` will be passed to - `post_list_deployments_with_metadata`. + We recommend only using this `post_list_deployment_group_revisions_with_metadata` + interceptor in new development instead of the `post_list_deployment_group_revisions` interceptor. + When both interceptors are used, this `post_list_deployment_group_revisions_with_metadata` interceptor runs after the + `post_list_deployment_group_revisions` interceptor. The (possibly modified) response returned by + `post_list_deployment_group_revisions` will be passed to + `post_list_deployment_group_revisions_with_metadata`. 
""" return response, metadata - def pre_list_previews( + def pre_list_deployment_groups( self, - request: config.ListPreviewsRequest, + request: config.ListDeploymentGroupsRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[config.ListPreviewsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: - """Pre-rpc interceptor for list_previews + ) -> Tuple[ + config.ListDeploymentGroupsRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for list_deployment_groups Override in a subclass to manipulate the request or metadata before they are sent to the Config server. """ return request, metadata - def post_list_previews( - self, response: config.ListPreviewsResponse - ) -> config.ListPreviewsResponse: - """Post-rpc interceptor for list_previews + def post_list_deployment_groups( + self, response: config.ListDeploymentGroupsResponse + ) -> config.ListDeploymentGroupsResponse: + """Post-rpc interceptor for list_deployment_groups - DEPRECATED. Please use the `post_list_previews_with_metadata` + DEPRECATED. Please use the `post_list_deployment_groups_with_metadata` interceptor instead. Override in a subclass to read or manipulate the response after it is returned by the Config server but before - it is returned to user code. This `post_list_previews` interceptor runs - before the `post_list_previews_with_metadata` interceptor. + it is returned to user code. This `post_list_deployment_groups` interceptor runs + before the `post_list_deployment_groups_with_metadata` interceptor. 
""" return response - def post_list_previews_with_metadata( + def post_list_deployment_groups_with_metadata( self, - response: config.ListPreviewsResponse, + response: config.ListDeploymentGroupsResponse, metadata: Sequence[Tuple[str, Union[str, bytes]]], - ) -> Tuple[config.ListPreviewsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: - """Post-rpc interceptor for list_previews + ) -> Tuple[ + config.ListDeploymentGroupsResponse, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for list_deployment_groups Override in a subclass to read or manipulate the response or metadata after it is returned by the Config server but before it is returned to user code. - We recommend only using this `post_list_previews_with_metadata` - interceptor in new development instead of the `post_list_previews` interceptor. - When both interceptors are used, this `post_list_previews_with_metadata` interceptor runs after the - `post_list_previews` interceptor. The (possibly modified) response returned by - `post_list_previews` will be passed to - `post_list_previews_with_metadata`. + We recommend only using this `post_list_deployment_groups_with_metadata` + interceptor in new development instead of the `post_list_deployment_groups` interceptor. + When both interceptors are used, this `post_list_deployment_groups_with_metadata` interceptor runs after the + `post_list_deployment_groups` interceptor. The (possibly modified) response returned by + `post_list_deployment_groups` will be passed to + `post_list_deployment_groups_with_metadata`. """ return response, metadata - def pre_list_resource_changes( + def pre_list_deployments( + self, + request: config.ListDeploymentsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListDeploymentsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_deployments + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_list_deployments( + self, response: config.ListDeploymentsResponse + ) -> config.ListDeploymentsResponse: + """Post-rpc interceptor for list_deployments + + DEPRECATED. Please use the `post_list_deployments_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_list_deployments` interceptor runs + before the `post_list_deployments_with_metadata` interceptor. + """ + return response + + def post_list_deployments_with_metadata( + self, + response: config.ListDeploymentsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListDeploymentsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_deployments + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_deployments_with_metadata` + interceptor in new development instead of the `post_list_deployments` interceptor. + When both interceptors are used, this `post_list_deployments_with_metadata` interceptor runs after the + `post_list_deployments` interceptor. The (possibly modified) response returned by + `post_list_deployments` will be passed to + `post_list_deployments_with_metadata`. + """ + return response, metadata + + def pre_list_previews( + self, + request: config.ListPreviewsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListPreviewsRequest, Sequence[Tuple[str, Union[str, bytes]]]]: + """Pre-rpc interceptor for list_previews + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. 
+ """ + return request, metadata + + def post_list_previews( + self, response: config.ListPreviewsResponse + ) -> config.ListPreviewsResponse: + """Post-rpc interceptor for list_previews + + DEPRECATED. Please use the `post_list_previews_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_list_previews` interceptor runs + before the `post_list_previews_with_metadata` interceptor. + """ + return response + + def post_list_previews_with_metadata( + self, + response: config.ListPreviewsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[config.ListPreviewsResponse, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for list_previews + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_list_previews_with_metadata` + interceptor in new development instead of the `post_list_previews` interceptor. + When both interceptors are used, this `post_list_previews_with_metadata` interceptor runs after the + `post_list_previews` interceptor. The (possibly modified) response returned by + `post_list_previews` will be passed to + `post_list_previews_with_metadata`. 
+ """ + return response, metadata + + def pre_list_resource_changes( self, request: config.ListResourceChangesRequest, metadata: Sequence[Tuple[str, Union[str, bytes]]], @@ -1489,6 +1905,54 @@ def post_lock_deployment_with_metadata( """ return response, metadata + def pre_provision_deployment_group( + self, + request: config.ProvisionDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.ProvisionDeploymentGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for provision_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_provision_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for provision_deployment_group + + DEPRECATED. Please use the `post_provision_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_provision_deployment_group` interceptor runs + before the `post_provision_deployment_group_with_metadata` interceptor. + """ + return response + + def post_provision_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for provision_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. + + We recommend only using this `post_provision_deployment_group_with_metadata` + interceptor in new development instead of the `post_provision_deployment_group` interceptor. 
+ When both interceptors are used, this `post_provision_deployment_group_with_metadata` interceptor runs after the + `post_provision_deployment_group` interceptor. The (possibly modified) response returned by + `post_provision_deployment_group` will be passed to + `post_provision_deployment_group_with_metadata`. + """ + return response, metadata + def pre_unlock_deployment( self, request: config.UnlockDeploymentRequest, @@ -1629,6 +2093,54 @@ def post_update_deployment_with_metadata( """ return response, metadata + def pre_update_deployment_group( + self, + request: config.UpdateDeploymentGroupRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + config.UpdateDeploymentGroupRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for update_deployment_group + + Override in a subclass to manipulate the request or metadata + before they are sent to the Config server. + """ + return request, metadata + + def post_update_deployment_group( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_deployment_group + + DEPRECATED. Please use the `post_update_deployment_group_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the Config server but before + it is returned to user code. This `post_update_deployment_group` interceptor runs + before the `post_update_deployment_group_with_metadata` interceptor. + """ + return response + + def post_update_deployment_group_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_deployment_group + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the Config server but before it is returned to user code. 
+ + We recommend only using this `post_update_deployment_group_with_metadata` + interceptor in new development instead of the `post_update_deployment_group` interceptor. + When both interceptors are used, this `post_update_deployment_group_with_metadata` interceptor runs after the + `post_update_deployment_group` interceptor. The (possibly modified) response returned by + `post_update_deployment_group` will be passed to + `post_update_deployment_group_with_metadata`. + """ + return response, metadata + def pre_get_location( self, request: locations_pb2.GetLocationRequest, @@ -2156,9 +2668,11 @@ def __call__( ) return resp - class _CreatePreview(_BaseConfigRestTransport._BaseCreatePreview, ConfigRestStub): + class _CreateDeploymentGroup( + _BaseConfigRestTransport._BaseCreateDeploymentGroup, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.CreatePreview") + return hash("ConfigRestTransport.CreateDeploymentGroup") @staticmethod def _get_response( @@ -2185,17 +2699,18 @@ def _get_response( def __call__( self, - request: config.CreatePreviewRequest, + request: config.CreateDeploymentGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the create preview method over HTTP. + r"""Call the create deployment group method over HTTP. Args: - request (~.config.CreatePreviewRequest): - The request object. A request to create a preview. + request (~.config.CreateDeploymentGroupRequest): + The request object. A request to create a deployment + group retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2213,25 +2728,23 @@ def __call__( """ http_options = ( - _BaseConfigRestTransport._BaseCreatePreview._get_http_options() + _BaseConfigRestTransport._BaseCreateDeploymentGroup._get_http_options() ) - request, metadata = self._interceptor.pre_create_preview(request, metadata) - transcoded_request = ( - _BaseConfigRestTransport._BaseCreatePreview._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_create_deployment_group( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseCreateDeploymentGroup._get_transcoded_request( + http_options, request ) - body = _BaseConfigRestTransport._BaseCreatePreview._get_request_body_json( + body = _BaseConfigRestTransport._BaseCreateDeploymentGroup._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = ( - _BaseConfigRestTransport._BaseCreatePreview._get_query_params_json( - transcoded_request - ) + query_params = _BaseConfigRestTransport._BaseCreateDeploymentGroup._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -2252,17 +2765,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.CreatePreview", + f"Sending request for google.cloud.config_v1.ConfigClient.CreateDeploymentGroup", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "CreatePreview", + "rpcName": "CreateDeploymentGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._CreatePreview._get_response( + response = ConfigRestTransport._CreateDeploymentGroup._get_response( self._host, metadata, query_params, @@ -2281,9 +2794,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_create_preview(resp) + resp = self._interceptor.post_create_deployment_group(resp) 
response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_create_preview_with_metadata( + resp, _ = self._interceptor.post_create_deployment_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -2299,21 +2812,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.create_preview", + "Received response for google.cloud.config_v1.ConfigClient.create_deployment_group", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "CreatePreview", + "rpcName": "CreateDeploymentGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteDeployment( - _BaseConfigRestTransport._BaseDeleteDeployment, ConfigRestStub - ): + class _CreatePreview(_BaseConfigRestTransport._BaseCreatePreview, ConfigRestStub): def __hash__(self): - return hash("ConfigRestTransport.DeleteDeployment") + return hash("ConfigRestTransport.CreatePreview") @staticmethod def _get_response( @@ -2334,22 +2845,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: config.DeleteDeploymentRequest, + request: config.CreatePreviewRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete deployment method over HTTP. + r"""Call the create preview method over HTTP. Args: - request (~.config.DeleteDeploymentRequest): - The request object. + request (~.config.CreatePreviewRequest): + The request object. A request to create a preview. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2367,21 +2879,23 @@ def __call__( """ http_options = ( - _BaseConfigRestTransport._BaseDeleteDeployment._get_http_options() + _BaseConfigRestTransport._BaseCreatePreview._get_http_options() ) - request, metadata = self._interceptor.pre_delete_deployment( - request, metadata - ) + request, metadata = self._interceptor.pre_create_preview(request, metadata) transcoded_request = ( - _BaseConfigRestTransport._BaseDeleteDeployment._get_transcoded_request( + _BaseConfigRestTransport._BaseCreatePreview._get_transcoded_request( http_options, request ) ) + body = _BaseConfigRestTransport._BaseCreatePreview._get_request_body_json( + transcoded_request + ) + # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseDeleteDeployment._get_query_params_json( + _BaseConfigRestTransport._BaseCreatePreview._get_query_params_json( transcoded_request ) ) @@ -2404,23 +2918,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.DeleteDeployment", + f"Sending request for google.cloud.config_v1.ConfigClient.CreatePreview", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "DeleteDeployment", + "rpcName": "CreatePreview", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._DeleteDeployment._get_response( + response = ConfigRestTransport._CreatePreview._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2432,9 +2947,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_delete_deployment(resp) + resp = self._interceptor.post_create_preview(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = 
self._interceptor.post_delete_deployment_with_metadata( + resp, _ = self._interceptor.post_create_preview_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -2450,19 +2965,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.delete_deployment", + "Received response for google.cloud.config_v1.ConfigClient.create_preview", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "DeleteDeployment", + "rpcName": "CreatePreview", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeletePreview(_BaseConfigRestTransport._BaseDeletePreview, ConfigRestStub): + class _DeleteDeployment( + _BaseConfigRestTransport._BaseDeleteDeployment, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.DeletePreview") + return hash("ConfigRestTransport.DeleteDeployment") @staticmethod def _get_response( @@ -2488,17 +3005,17 @@ def _get_response( def __call__( self, - request: config.DeletePreviewRequest, + request: config.DeleteDeploymentRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the delete preview method over HTTP. + r"""Call the delete deployment method over HTTP. Args: - request (~.config.DeletePreviewRequest): - The request object. A request to delete a preview. + request (~.config.DeleteDeploymentRequest): + The request object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. 
@@ -2516,19 +3033,21 @@ def __call__( """ http_options = ( - _BaseConfigRestTransport._BaseDeletePreview._get_http_options() + _BaseConfigRestTransport._BaseDeleteDeployment._get_http_options() ) - request, metadata = self._interceptor.pre_delete_preview(request, metadata) + request, metadata = self._interceptor.pre_delete_deployment( + request, metadata + ) transcoded_request = ( - _BaseConfigRestTransport._BaseDeletePreview._get_transcoded_request( + _BaseConfigRestTransport._BaseDeleteDeployment._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseDeletePreview._get_query_params_json( + _BaseConfigRestTransport._BaseDeleteDeployment._get_query_params_json( transcoded_request ) ) @@ -2551,10 +3070,305 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.DeletePreview", + f"Sending request for google.cloud.config_v1.ConfigClient.DeleteDeployment", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "DeletePreview", + "rpcName": "DeleteDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._DeleteDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.delete_deployment", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeleteDeployment", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteDeploymentGroup( + _BaseConfigRestTransport._BaseDeleteDeploymentGroup, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.DeleteDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: config.DeleteDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: 
+ r"""Call the delete deployment group method over HTTP. + + Args: + request (~.config.DeleteDeploymentGroupRequest): + The request object. Request message for Delete + DeploymentGroup + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseDeleteDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_deployment_group( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseDeleteDeploymentGroup._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseDeleteDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.DeleteDeploymentGroup", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeleteDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + 
response = ConfigRestTransport._DeleteDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.delete_deployment_group", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeleteDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeletePreview(_BaseConfigRestTransport._BaseDeletePreview, ConfigRestStub): + def __hash__(self): + return hash("ConfigRestTransport.DeletePreview") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return 
response + + def __call__( + self, + request: config.DeletePreviewRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete preview method over HTTP. + + Args: + request (~.config.DeletePreviewRequest): + The request object. A request to delete a preview. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseDeletePreview._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_preview(request, metadata) + transcoded_request = ( + _BaseConfigRestTransport._BaseDeletePreview._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseConfigRestTransport._BaseDeletePreview._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.DeletePreview", + extra={ + 
"serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeletePreview", "httpRequest": http_request, "metadata": http_request["headers"], }, @@ -2576,19 +3390,753 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = operations_pb2.Operation() - json_format.Parse(response.content, resp, ignore_unknown_fields=True) + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_preview(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_preview_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.delete_preview", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeletePreview", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteStatefile( + _BaseConfigRestTransport._BaseDeleteStatefile, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.DeleteStatefile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( 
+ self, + request: config.DeleteStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + r"""Call the delete statefile method over HTTP. + + Args: + request (~.config.DeleteStatefileRequest): + The request object. A request to delete a state file + passed to a 'DeleteStatefile' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + + http_options = ( + _BaseConfigRestTransport._BaseDeleteStatefile._get_http_options() + ) + + request, metadata = self._interceptor.pre_delete_statefile( + request, metadata + ) + transcoded_request = ( + _BaseConfigRestTransport._BaseDeleteStatefile._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseConfigRestTransport._BaseDeleteStatefile._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseConfigRestTransport._BaseDeleteStatefile._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.DeleteStatefile", + extra={ + "serviceName": 
"google.cloud.config.v1.Config", + "rpcName": "DeleteStatefile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._DeleteStatefile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + class _DeprovisionDeploymentGroup( + _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.DeprovisionDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.DeprovisionDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the deprovision deployment + group method over HTTP. + + Args: + request (~.config.DeprovisionDeploymentGroupRequest): + The request object. The request message for the + DeprovisionDeploymentGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup._get_http_options() + + request, metadata = self._interceptor.pre_deprovision_deployment_group( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.DeprovisionDeploymentGroup", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeprovisionDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._DeprovisionDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_deprovision_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_deprovision_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.deprovision_deployment_group", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "DeprovisionDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExportDeploymentStatefile( + _BaseConfigRestTransport._BaseExportDeploymentStatefile, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.ExportDeploymentStatefile") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.ExportDeploymentStatefileRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: 
Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.Statefile: + r"""Call the export deployment + statefile method over HTTP. + + Args: + request (~.config.ExportDeploymentStatefileRequest): + The request object. A request to export a state file + passed to a 'ExportDeploymentStatefile' + call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.config.Statefile: + Contains info about a Terraform state + file + + """ + + http_options = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_http_options() + + request, metadata = self._interceptor.pre_export_deployment_statefile( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_transcoded_request( + http_options, request + ) + + body = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.ExportDeploymentStatefile", + extra={ + 
"serviceName": "google.cloud.config.v1.Config", + "rpcName": "ExportDeploymentStatefile", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._ExportDeploymentStatefile._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_deployment_statefile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_deployment_statefile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = config.Statefile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.export_deployment_statefile", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "ExportDeploymentStatefile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExportLockInfo(_BaseConfigRestTransport._BaseExportLockInfo, ConfigRestStub): + def __hash__(self): + return hash("ConfigRestTransport.ExportLockInfo") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = 
dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: config.ExportLockInfoRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> config.LockInfo: + r"""Call the export lock info method over HTTP. + + Args: + request (~.config.ExportLockInfoRequest): + The request object. A request to get a state file lock + info passed to a 'ExportLockInfo' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.config.LockInfo: + Details about the lock which locked + the deployment. 
+ + """ + + http_options = ( + _BaseConfigRestTransport._BaseExportLockInfo._get_http_options() + ) + + request, metadata = self._interceptor.pre_export_lock_info( + request, metadata + ) + transcoded_request = ( + _BaseConfigRestTransport._BaseExportLockInfo._get_transcoded_request( + http_options, request + ) + ) + + # Jsonify the query params + query_params = ( + _BaseConfigRestTransport._BaseExportLockInfo._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.ExportLockInfo", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "ExportLockInfo", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._ExportLockInfo._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.LockInfo() + pb_resp = config.LockInfo.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_lock_info(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_lock_info_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = config.LockInfo.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.export_lock_info", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "ExportLockInfo", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ExportPreviewResult( + _BaseConfigRestTransport._BaseExportPreviewResult, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.ExportPreviewResult") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.ExportPreviewResultRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = 
(), + ) -> config.ExportPreviewResultResponse: + r"""Call the export preview result method over HTTP. + + Args: + request (~.config.ExportPreviewResultRequest): + The request object. A request to export preview results. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.config.ExportPreviewResultResponse: + A response to ``ExportPreviewResult`` call. Contains + preview results. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseExportPreviewResult._get_http_options() + ) + + request, metadata = self._interceptor.pre_export_preview_result( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseExportPreviewResult._get_transcoded_request( + http_options, request + ) + + body = _BaseConfigRestTransport._BaseExportPreviewResult._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseExportPreviewResult._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.ExportPreviewResult", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": 
"ExportPreviewResult", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._ExportPreviewResult._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = config.ExportPreviewResultResponse() + pb_resp = config.ExportPreviewResultResponse.pb(resp) - resp = self._interceptor.post_delete_preview(resp) + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_preview_result(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_delete_preview_with_metadata( + resp, _ = self._interceptor.post_export_preview_result_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = json_format.MessageToJson(resp) + response_payload = config.ExportPreviewResultResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -2597,21 +4145,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.delete_preview", + "Received response for google.cloud.config_v1.ConfigClient.export_preview_result", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "DeletePreview", + "rpcName": "ExportPreviewResult", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _DeleteStatefile( - _BaseConfigRestTransport._BaseDeleteStatefile, ConfigRestStub + class _ExportRevisionStatefile( + _BaseConfigRestTransport._BaseExportRevisionStatefile, ConfigRestStub ): def __hash__(self): - 
return hash("ConfigRestTransport.DeleteStatefile") + return hash("ConfigRestTransport.ExportRevisionStatefile") @staticmethod def _get_response( @@ -2638,18 +4186,19 @@ def _get_response( def __call__( self, - request: config.DeleteStatefileRequest, + request: config.ExportRevisionStatefileRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ): - r"""Call the delete statefile method over HTTP. + ) -> config.Statefile: + r"""Call the export revision statefile method over HTTP. Args: - request (~.config.DeleteStatefileRequest): - The request object. A request to delete a state file - passed to a 'DeleteStatefile' call. + request (~.config.ExportRevisionStatefileRequest): + The request object. A request to export a state file + passed to a 'ExportRevisionStatefile' + call. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2657,30 +4206,30 @@ def __call__( sent along with the request as metadata. Normally, each value must be of type `str`, but for metadata keys ending with the suffix `-bin`, the corresponding values must be of type `bytes`. 
+ + Returns: + ~.config.Statefile: + Contains info about a Terraform state + file + """ - http_options = ( - _BaseConfigRestTransport._BaseDeleteStatefile._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_http_options() - request, metadata = self._interceptor.pre_delete_statefile( + request, metadata = self._interceptor.pre_export_revision_statefile( request, metadata ) - transcoded_request = ( - _BaseConfigRestTransport._BaseDeleteStatefile._get_transcoded_request( - http_options, request - ) + transcoded_request = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_transcoded_request( + http_options, request ) - body = _BaseConfigRestTransport._BaseDeleteStatefile._get_request_body_json( + body = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = ( - _BaseConfigRestTransport._BaseDeleteStatefile._get_query_params_json( - transcoded_request - ) + query_params = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -2701,17 +4250,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.DeleteStatefile", + f"Sending request for google.cloud.config_v1.ConfigClient.ExportRevisionStatefile", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "DeleteStatefile", + "rpcName": "ExportRevisionStatefile", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._DeleteStatefile._get_response( + response = ConfigRestTransport._ExportRevisionStatefile._get_response( self._host, metadata, query_params, @@ -2726,11 +4275,45 @@ def __call__( if response.status_code >= 400: raise core_exceptions.from_http_response(response) - class _ExportDeploymentStatefile( - 
_BaseConfigRestTransport._BaseExportDeploymentStatefile, ConfigRestStub + # Return the response + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_export_revision_statefile(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_export_revision_statefile_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = config.Statefile.to_json(response) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.export_revision_statefile", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "ExportRevisionStatefile", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAutoMigrationConfig( + _BaseConfigRestTransport._BaseGetAutoMigrationConfig, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.ExportDeploymentStatefile") + return hash("ConfigRestTransport.GetAutoMigrationConfig") @staticmethod def _get_response( @@ -2751,56 +4334,52 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: config.ExportDeploymentStatefileRequest, + request: config.GetAutoMigrationConfigRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Statefile: - r"""Call the export deployment - statefile method over HTTP. 
+ ) -> config.AutoMigrationConfig: + r"""Call the get auto migration config method over HTTP. - Args: - request (~.config.ExportDeploymentStatefileRequest): - The request object. A request to export a state file - passed to a 'ExportDeploymentStatefile' - call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.config.GetAutoMigrationConfigRequest): + The request object. The request message for the + GetAutoMigrationConfig method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.config.Statefile: - Contains info about a Terraform state - file + Returns: + ~.config.AutoMigrationConfig: + AutoMigrationConfig contains the + automigration configuration for a + project. 
""" - http_options = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_http_options() + http_options = ( + _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_http_options() + ) - request, metadata = self._interceptor.pre_export_deployment_statefile( + request, metadata = self._interceptor.pre_get_auto_migration_config( request, metadata ) - transcoded_request = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_transcoded_request( + transcoded_request = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_transcoded_request( http_options, request ) - body = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_request_body_json( - transcoded_request - ) - # Jsonify the query params - query_params = _BaseConfigRestTransport._BaseExportDeploymentStatefile._get_query_params_json( + query_params = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_query_params_json( transcoded_request ) @@ -2822,24 +4401,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.ExportDeploymentStatefile", + f"Sending request for google.cloud.config_v1.ConfigClient.GetAutoMigrationConfig", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportDeploymentStatefile", + "rpcName": "GetAutoMigrationConfig", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._ExportDeploymentStatefile._get_response( + response = ConfigRestTransport._GetAutoMigrationConfig._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -2848,21 +4426,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Statefile() - pb_resp = config.Statefile.pb(resp) + resp = config.AutoMigrationConfig() + pb_resp = 
config.AutoMigrationConfig.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_deployment_statefile(resp) + resp = self._interceptor.post_get_auto_migration_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_deployment_statefile_with_metadata( + resp, _ = self._interceptor.post_get_auto_migration_config_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Statefile.to_json(response) + response_payload = config.AutoMigrationConfig.to_json(response) except: response_payload = None http_response = { @@ -2871,19 +4449,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "Received response for google.cloud.config_v1.ConfigClient.get_auto_migration_config", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportDeploymentStatefile", + "rpcName": "GetAutoMigrationConfig", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ExportLockInfo(_BaseConfigRestTransport._BaseExportLockInfo, ConfigRestStub): + class _GetDeployment(_BaseConfigRestTransport._BaseGetDeployment, ConfigRestStub): def __hash__(self): - return hash("ConfigRestTransport.ExportLockInfo") + return hash("ConfigRestTransport.GetDeployment") @staticmethod def _get_response( @@ -2909,18 +4487,17 @@ def _get_response( def __call__( self, - request: config.ExportLockInfoRequest, + request: config.GetDeploymentRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.LockInfo: - r"""Call the export lock info method over HTTP. 
+ ) -> config.Deployment: + r"""Call the get deployment method over HTTP. Args: - request (~.config.ExportLockInfoRequest): - The request object. A request to get a state file lock - info passed to a 'ExportLockInfo' call. + request (~.config.GetDeploymentRequest): + The request object. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -2930,28 +4507,27 @@ def __call__( be of type `bytes`. Returns: - ~.config.LockInfo: - Details about the lock which locked - the deployment. + ~.config.Deployment: + A Deployment is a group of resources + and configs managed and provisioned by + Infra Manager. """ http_options = ( - _BaseConfigRestTransport._BaseExportLockInfo._get_http_options() + _BaseConfigRestTransport._BaseGetDeployment._get_http_options() ) - request, metadata = self._interceptor.pre_export_lock_info( - request, metadata - ) + request, metadata = self._interceptor.pre_get_deployment(request, metadata) transcoded_request = ( - _BaseConfigRestTransport._BaseExportLockInfo._get_transcoded_request( + _BaseConfigRestTransport._BaseGetDeployment._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseExportLockInfo._get_query_params_json( + _BaseConfigRestTransport._BaseGetDeployment._get_query_params_json( transcoded_request ) ) @@ -2974,17 +4550,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.ExportLockInfo", + f"Sending request for google.cloud.config_v1.ConfigClient.GetDeployment", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportLockInfo", + "rpcName": "GetDeployment", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._ExportLockInfo._get_response( + response = ConfigRestTransport._GetDeployment._get_response( self._host, 
metadata, query_params, @@ -2999,21 +4575,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.LockInfo() - pb_resp = config.LockInfo.pb(resp) + resp = config.Deployment() + pb_resp = config.Deployment.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_lock_info(resp) + resp = self._interceptor.post_get_deployment(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_lock_info_with_metadata( + resp, _ = self._interceptor.post_get_deployment_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.LockInfo.to_json(response) + response_payload = config.Deployment.to_json(response) except: response_payload = None http_response = { @@ -3022,21 +4598,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.export_lock_info", + "Received response for google.cloud.config_v1.ConfigClient.get_deployment", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportLockInfo", + "rpcName": "GetDeployment", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ExportPreviewResult( - _BaseConfigRestTransport._BaseExportPreviewResult, ConfigRestStub + class _GetDeploymentGroup( + _BaseConfigRestTransport._BaseGetDeploymentGroup, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.ExportPreviewResult") + return hash("ConfigRestTransport.GetDeploymentGroup") @staticmethod def _get_response( @@ -3057,23 +4633,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: config.ExportPreviewResultRequest, + request: 
config.GetDeploymentGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.ExportPreviewResultResponse: - r"""Call the export preview result method over HTTP. + ) -> config.DeploymentGroup: + r"""Call the get deployment group method over HTTP. Args: - request (~.config.ExportPreviewResultRequest): - The request object. A request to export preview results. + request (~.config.GetDeploymentGroupRequest): + The request object. The request message for the + GetDeploymentGroup method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3083,30 +4659,29 @@ def __call__( be of type `bytes`. Returns: - ~.config.ExportPreviewResultResponse: - A response to ``ExportPreviewResult`` call. Contains - preview results. + ~.config.DeploymentGroup: + A DeploymentGroup is a collection of + DeploymentUnits that in a DAG-like + structure. 
""" http_options = ( - _BaseConfigRestTransport._BaseExportPreviewResult._get_http_options() + _BaseConfigRestTransport._BaseGetDeploymentGroup._get_http_options() ) - request, metadata = self._interceptor.pre_export_preview_result( + request, metadata = self._interceptor.pre_get_deployment_group( request, metadata ) - transcoded_request = _BaseConfigRestTransport._BaseExportPreviewResult._get_transcoded_request( + transcoded_request = _BaseConfigRestTransport._BaseGetDeploymentGroup._get_transcoded_request( http_options, request ) - body = _BaseConfigRestTransport._BaseExportPreviewResult._get_request_body_json( - transcoded_request - ) - # Jsonify the query params - query_params = _BaseConfigRestTransport._BaseExportPreviewResult._get_query_params_json( - transcoded_request + query_params = ( + _BaseConfigRestTransport._BaseGetDeploymentGroup._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -3127,24 +4702,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.ExportPreviewResult", + f"Sending request for google.cloud.config_v1.ConfigClient.GetDeploymentGroup", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportPreviewResult", + "rpcName": "GetDeploymentGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._ExportPreviewResult._get_response( + response = ConfigRestTransport._GetDeploymentGroup._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3153,23 +4727,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.ExportPreviewResultResponse() - pb_resp = config.ExportPreviewResultResponse.pb(resp) + resp = config.DeploymentGroup() + pb_resp = 
config.DeploymentGroup.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_preview_result(resp) + resp = self._interceptor.post_get_deployment_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_preview_result_with_metadata( + resp, _ = self._interceptor.post_get_deployment_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.ExportPreviewResultResponse.to_json( - response - ) + response_payload = config.DeploymentGroup.to_json(response) except: response_payload = None http_response = { @@ -3178,21 +4750,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.export_preview_result", + "Received response for google.cloud.config_v1.ConfigClient.get_deployment_group", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportPreviewResult", + "rpcName": "GetDeploymentGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ExportRevisionStatefile( - _BaseConfigRestTransport._BaseExportRevisionStatefile, ConfigRestStub + class _GetDeploymentGroupRevision( + _BaseConfigRestTransport._BaseGetDeploymentGroupRevision, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.ExportRevisionStatefile") + return hash("ConfigRestTransport.GetDeploymentGroupRevision") @staticmethod def _get_response( @@ -3213,55 +4785,52 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: config.ExportRevisionStatefileRequest, + request: config.GetDeploymentGroupRevisionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = 
None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Statefile: - r"""Call the export revision statefile method over HTTP. + ) -> config.DeploymentGroupRevision: + r"""Call the get deployment group + revision method over HTTP. - Args: - request (~.config.ExportRevisionStatefileRequest): - The request object. A request to export a state file - passed to a 'ExportRevisionStatefile' - call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.config.GetDeploymentGroupRevisionRequest): + The request object. The request message for the + GetDeploymentGroupRevision method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.config.Statefile: - Contains info about a Terraform state - file + Returns: + ~.config.DeploymentGroupRevision: + A DeploymentGroupRevision represents a snapshot of a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] + at a given point in time, created when a DeploymentGroup + is provisioned or deprovisioned. 
""" - http_options = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_http_options() + http_options = _BaseConfigRestTransport._BaseGetDeploymentGroupRevision._get_http_options() - request, metadata = self._interceptor.pre_export_revision_statefile( + request, metadata = self._interceptor.pre_get_deployment_group_revision( request, metadata ) - transcoded_request = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_transcoded_request( + transcoded_request = _BaseConfigRestTransport._BaseGetDeploymentGroupRevision._get_transcoded_request( http_options, request ) - body = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_request_body_json( - transcoded_request - ) - # Jsonify the query params - query_params = _BaseConfigRestTransport._BaseExportRevisionStatefile._get_query_params_json( + query_params = _BaseConfigRestTransport._BaseGetDeploymentGroupRevision._get_query_params_json( transcoded_request ) @@ -3283,24 +4852,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.ExportRevisionStatefile", + f"Sending request for google.cloud.config_v1.ConfigClient.GetDeploymentGroupRevision", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportRevisionStatefile", + "rpcName": "GetDeploymentGroupRevision", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._ExportRevisionStatefile._get_response( + response = ConfigRestTransport._GetDeploymentGroupRevision._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -3309,21 +4877,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Statefile() - pb_resp = config.Statefile.pb(resp) + resp = config.DeploymentGroupRevision() + pb_resp = 
config.DeploymentGroupRevision.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_export_revision_statefile(resp) + resp = self._interceptor.post_get_deployment_group_revision(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_export_revision_statefile_with_metadata( - resp, response_metadata + resp, _ = ( + self._interceptor.post_get_deployment_group_revision_with_metadata( + resp, response_metadata + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Statefile.to_json(response) + response_payload = config.DeploymentGroupRevision.to_json(response) except: response_payload = None http_response = { @@ -3332,21 +4902,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.export_revision_statefile", + "Received response for google.cloud.config_v1.ConfigClient.get_deployment_group_revision", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ExportRevisionStatefile", + "rpcName": "GetDeploymentGroupRevision", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetAutoMigrationConfig( - _BaseConfigRestTransport._BaseGetAutoMigrationConfig, ConfigRestStub - ): + class _GetPreview(_BaseConfigRestTransport._BaseGetPreview, ConfigRestStub): def __hash__(self): - return hash("ConfigRestTransport.GetAutoMigrationConfig") + return hash("ConfigRestTransport.GetPreview") @staticmethod def _get_response( @@ -3372,18 +4940,18 @@ def _get_response( def __call__( self, - request: config.GetAutoMigrationConfigRequest, + request: config.GetPreviewRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.AutoMigrationConfig: - r"""Call the get auto 
migration config method over HTTP. + ) -> config.Preview: + r"""Call the get preview method over HTTP. Args: - request (~.config.GetAutoMigrationConfigRequest): - The request object. The request message for the - GetAutoMigrationConfig method. + request (~.config.GetPreviewRequest): + The request object. A request to get details about a + preview. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3393,27 +4961,28 @@ def __call__( be of type `bytes`. Returns: - ~.config.AutoMigrationConfig: - AutoMigrationConfig contains the - automigration configuration for a - project. + ~.config.Preview: + A preview represents a set of actions + Infra Manager would perform to move the + resources towards the desired state as + specified in the configuration. """ - http_options = ( - _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseGetPreview._get_http_options() - request, metadata = self._interceptor.pre_get_auto_migration_config( - request, metadata - ) - transcoded_request = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_transcoded_request( - http_options, request + request, metadata = self._interceptor.pre_get_preview(request, metadata) + transcoded_request = ( + _BaseConfigRestTransport._BaseGetPreview._get_transcoded_request( + http_options, request + ) ) # Jsonify the query params - query_params = _BaseConfigRestTransport._BaseGetAutoMigrationConfig._get_query_params_json( - transcoded_request + query_params = ( + _BaseConfigRestTransport._BaseGetPreview._get_query_params_json( + transcoded_request + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -3434,17 +5003,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetAutoMigrationConfig", + f"Sending request for google.cloud.config_v1.ConfigClient.GetPreview", 
extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetAutoMigrationConfig", + "rpcName": "GetPreview", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetAutoMigrationConfig._get_response( + response = ConfigRestTransport._GetPreview._get_response( self._host, metadata, query_params, @@ -3459,21 +5028,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.AutoMigrationConfig() - pb_resp = config.AutoMigrationConfig.pb(resp) + resp = config.Preview() + pb_resp = config.Preview.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_auto_migration_config(resp) + resp = self._interceptor.post_get_preview(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_auto_migration_config_with_metadata( + resp, _ = self._interceptor.post_get_preview_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.AutoMigrationConfig.to_json(response) + response_payload = config.Preview.to_json(response) except: response_payload = None http_response = { @@ -3482,19 +5051,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_auto_migration_config", + "Received response for google.cloud.config_v1.ConfigClient.get_preview", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetAutoMigrationConfig", + "rpcName": "GetPreview", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetDeployment(_BaseConfigRestTransport._BaseGetDeployment, ConfigRestStub): + class _GetResource(_BaseConfigRestTransport._BaseGetResource, ConfigRestStub): def __hash__(self): - return 
hash("ConfigRestTransport.GetDeployment") + return hash("ConfigRestTransport.GetResource") @staticmethod def _get_response( @@ -3520,17 +5089,18 @@ def _get_response( def __call__( self, - request: config.GetDeploymentRequest, + request: config.GetResourceRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Deployment: - r"""Call the get deployment method over HTTP. + ) -> config.Resource: + r"""Call the get resource method over HTTP. Args: - request (~.config.GetDeploymentRequest): - The request object. + request (~.config.GetResourceRequest): + The request object. A request to get a Resource from a + 'GetResource' call. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3540,27 +5110,26 @@ def __call__( be of type `bytes`. Returns: - ~.config.Deployment: - A Deployment is a group of resources - and configs managed and provisioned by - Infra Manager. + ~.config.Resource: + Resource represents a Google Cloud + Platform resource actuated by IM. + Resources are child resources of + Revisions. 
""" - http_options = ( - _BaseConfigRestTransport._BaseGetDeployment._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseGetResource._get_http_options() - request, metadata = self._interceptor.pre_get_deployment(request, metadata) + request, metadata = self._interceptor.pre_get_resource(request, metadata) transcoded_request = ( - _BaseConfigRestTransport._BaseGetDeployment._get_transcoded_request( + _BaseConfigRestTransport._BaseGetResource._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseGetDeployment._get_query_params_json( + _BaseConfigRestTransport._BaseGetResource._get_query_params_json( transcoded_request ) ) @@ -3583,17 +5152,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetDeployment", + f"Sending request for google.cloud.config_v1.ConfigClient.GetResource", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetDeployment", + "rpcName": "GetResource", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetDeployment._get_response( + response = ConfigRestTransport._GetResource._get_response( self._host, metadata, query_params, @@ -3608,21 +5177,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Deployment() - pb_resp = config.Deployment.pb(resp) + resp = config.Resource() + pb_resp = config.Resource.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_deployment(resp) + resp = self._interceptor.post_get_resource(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_deployment_with_metadata( + resp, _ = self._interceptor.post_get_resource_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Deployment.to_json(response) + response_payload = config.Resource.to_json(response) except: response_payload = None http_response = { @@ -3631,19 +5200,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_deployment", + "Received response for google.cloud.config_v1.ConfigClient.get_resource", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetDeployment", + "rpcName": "GetResource", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetPreview(_BaseConfigRestTransport._BaseGetPreview, ConfigRestStub): + class _GetResourceChange( + _BaseConfigRestTransport._BaseGetResourceChange, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.GetPreview") + return hash("ConfigRestTransport.GetResourceChange") @staticmethod def _get_response( @@ -3669,18 +5240,18 @@ def _get_response( def __call__( self, - request: config.GetPreviewRequest, + request: config.GetResourceChangeRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Preview: - r"""Call the get preview method over HTTP. + ) -> config.ResourceChange: + r"""Call the get resource change method over HTTP. Args: - request (~.config.GetPreviewRequest): - The request object. A request to get details about a - preview. + request (~.config.GetResourceChangeRequest): + The request object. The request message for the + GetResourceChange method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3690,26 +5261,28 @@ def __call__( be of type `bytes`. 
Returns: - ~.config.Preview: - A preview represents a set of actions - Infra Manager would perform to move the - resources towards the desired state as - specified in the configuration. + ~.config.ResourceChange: + A resource change represents a change + to a resource in the state file. """ - http_options = _BaseConfigRestTransport._BaseGetPreview._get_http_options() + http_options = ( + _BaseConfigRestTransport._BaseGetResourceChange._get_http_options() + ) - request, metadata = self._interceptor.pre_get_preview(request, metadata) + request, metadata = self._interceptor.pre_get_resource_change( + request, metadata + ) transcoded_request = ( - _BaseConfigRestTransport._BaseGetPreview._get_transcoded_request( + _BaseConfigRestTransport._BaseGetResourceChange._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseGetPreview._get_query_params_json( + _BaseConfigRestTransport._BaseGetResourceChange._get_query_params_json( transcoded_request ) ) @@ -3732,17 +5305,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetPreview", + f"Sending request for google.cloud.config_v1.ConfigClient.GetResourceChange", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetPreview", + "rpcName": "GetResourceChange", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetPreview._get_response( + response = ConfigRestTransport._GetResourceChange._get_response( self._host, metadata, query_params, @@ -3757,21 +5330,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Preview() - pb_resp = config.Preview.pb(resp) + resp = config.ResourceChange() + pb_resp = config.ResourceChange.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = 
self._interceptor.post_get_preview(resp) + resp = self._interceptor.post_get_resource_change(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_preview_with_metadata( + resp, _ = self._interceptor.post_get_resource_change_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Preview.to_json(response) + response_payload = config.ResourceChange.to_json(response) except: response_payload = None http_response = { @@ -3780,19 +5353,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_preview", + "Received response for google.cloud.config_v1.ConfigClient.get_resource_change", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetPreview", + "rpcName": "GetResourceChange", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetResource(_BaseConfigRestTransport._BaseGetResource, ConfigRestStub): + class _GetResourceDrift( + _BaseConfigRestTransport._BaseGetResourceDrift, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.GetResource") + return hash("ConfigRestTransport.GetResourceDrift") @staticmethod def _get_response( @@ -3818,18 +5393,18 @@ def _get_response( def __call__( self, - request: config.GetResourceRequest, + request: config.GetResourceDriftRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Resource: - r"""Call the get resource method over HTTP. + ) -> config.ResourceDrift: + r"""Call the get resource drift method over HTTP. Args: - request (~.config.GetResourceRequest): - The request object. A request to get a Resource from a - 'GetResource' call. + request (~.config.GetResourceDriftRequest): + The request object. 
The request message for the + GetResourceDrift method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3839,26 +5414,28 @@ def __call__( be of type `bytes`. Returns: - ~.config.Resource: - Resource represents a Google Cloud - Platform resource actuated by IM. - Resources are child resources of - Revisions. + ~.config.ResourceDrift: + A resource drift represents a drift + to a resource in the state file. """ - http_options = _BaseConfigRestTransport._BaseGetResource._get_http_options() + http_options = ( + _BaseConfigRestTransport._BaseGetResourceDrift._get_http_options() + ) - request, metadata = self._interceptor.pre_get_resource(request, metadata) + request, metadata = self._interceptor.pre_get_resource_drift( + request, metadata + ) transcoded_request = ( - _BaseConfigRestTransport._BaseGetResource._get_transcoded_request( + _BaseConfigRestTransport._BaseGetResourceDrift._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseGetResource._get_query_params_json( + _BaseConfigRestTransport._BaseGetResourceDrift._get_query_params_json( transcoded_request ) ) @@ -3881,17 +5458,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetResource", + f"Sending request for google.cloud.config_v1.ConfigClient.GetResourceDrift", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResource", + "rpcName": "GetResourceDrift", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetResource._get_response( + response = ConfigRestTransport._GetResourceDrift._get_response( self._host, metadata, query_params, @@ -3906,21 +5483,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Resource() - pb_resp = 
config.Resource.pb(resp) + resp = config.ResourceDrift() + pb_resp = config.ResourceDrift.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_resource(resp) + resp = self._interceptor.post_get_resource_drift(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_resource_with_metadata( + resp, _ = self._interceptor.post_get_resource_drift_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Resource.to_json(response) + response_payload = config.ResourceDrift.to_json(response) except: response_payload = None http_response = { @@ -3929,21 +5506,19 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_resource", + "Received response for google.cloud.config_v1.ConfigClient.get_resource_drift", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResource", + "rpcName": "GetResourceDrift", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetResourceChange( - _BaseConfigRestTransport._BaseGetResourceChange, ConfigRestStub - ): + class _GetRevision(_BaseConfigRestTransport._BaseGetRevision, ConfigRestStub): def __hash__(self): - return hash("ConfigRestTransport.GetResourceChange") + return hash("ConfigRestTransport.GetRevision") @staticmethod def _get_response( @@ -3969,18 +5544,18 @@ def _get_response( def __call__( self, - request: config.GetResourceChangeRequest, + request: config.GetRevisionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.ResourceChange: - r"""Call the get resource change method over HTTP. + ) -> config.Revision: + r"""Call the get revision method over HTTP. 
Args: - request (~.config.GetResourceChangeRequest): - The request object. The request message for the - GetResourceChange method. + request (~.config.GetRevisionRequest): + The request object. A request to get a Revision from a + 'GetRevision' call. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -3990,28 +5565,27 @@ def __call__( be of type `bytes`. Returns: - ~.config.ResourceChange: - A resource change represents a change - to a resource in the state file. + ~.config.Revision: + A child resource of a Deployment + generated by a 'CreateDeployment' or + 'UpdateDeployment' call. Each Revision + contains metadata pertaining to a + snapshot of a particular Deployment. """ - http_options = ( - _BaseConfigRestTransport._BaseGetResourceChange._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseGetRevision._get_http_options() - request, metadata = self._interceptor.pre_get_resource_change( - request, metadata - ) + request, metadata = self._interceptor.pre_get_revision(request, metadata) transcoded_request = ( - _BaseConfigRestTransport._BaseGetResourceChange._get_transcoded_request( + _BaseConfigRestTransport._BaseGetRevision._get_transcoded_request( http_options, request ) ) # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseGetResourceChange._get_query_params_json( + _BaseConfigRestTransport._BaseGetRevision._get_query_params_json( transcoded_request ) ) @@ -4034,17 +5608,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetResourceChange", + f"Sending request for google.cloud.config_v1.ConfigClient.GetRevision", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResourceChange", + "rpcName": "GetRevision", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = 
ConfigRestTransport._GetResourceChange._get_response( + response = ConfigRestTransport._GetRevision._get_response( self._host, metadata, query_params, @@ -4059,21 +5633,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.ResourceChange() - pb_resp = config.ResourceChange.pb(resp) + resp = config.Revision() + pb_resp = config.Revision.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_resource_change(resp) + resp = self._interceptor.post_get_revision(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_resource_change_with_metadata( + resp, _ = self._interceptor.post_get_revision_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.ResourceChange.to_json(response) + response_payload = config.Revision.to_json(response) except: response_payload = None http_response = { @@ -4082,21 +5656,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_resource_change", + "Received response for google.cloud.config_v1.ConfigClient.get_revision", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResourceChange", + "rpcName": "GetRevision", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetResourceDrift( - _BaseConfigRestTransport._BaseGetResourceDrift, ConfigRestStub + class _GetTerraformVersion( + _BaseConfigRestTransport._BaseGetTerraformVersion, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.GetResourceDrift") + return hash("ConfigRestTransport.GetTerraformVersion") @staticmethod def _get_response( @@ -4122,18 +5696,18 @@ def _get_response( def __call__( self, - request: config.GetResourceDriftRequest, + request: 
config.GetTerraformVersionRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.ResourceDrift: - r"""Call the get resource drift method over HTTP. + ) -> config.TerraformVersion: + r"""Call the get terraform version method over HTTP. Args: - request (~.config.GetResourceDriftRequest): + request (~.config.GetTerraformVersionRequest): The request object. The request message for the - GetResourceDrift method. + GetTerraformVersion method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4143,30 +5717,27 @@ def __call__( be of type `bytes`. Returns: - ~.config.ResourceDrift: - A resource drift represents a drift - to a resource in the state file. + ~.config.TerraformVersion: + A TerraformVersion represents the + support state the corresponding + Terraform version. """ http_options = ( - _BaseConfigRestTransport._BaseGetResourceDrift._get_http_options() + _BaseConfigRestTransport._BaseGetTerraformVersion._get_http_options() ) - request, metadata = self._interceptor.pre_get_resource_drift( + request, metadata = self._interceptor.pre_get_terraform_version( request, metadata ) - transcoded_request = ( - _BaseConfigRestTransport._BaseGetResourceDrift._get_transcoded_request( - http_options, request - ) + transcoded_request = _BaseConfigRestTransport._BaseGetTerraformVersion._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = ( - _BaseConfigRestTransport._BaseGetResourceDrift._get_query_params_json( - transcoded_request - ) + query_params = _BaseConfigRestTransport._BaseGetTerraformVersion._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4187,17 +5758,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for 
google.cloud.config_v1.ConfigClient.GetResourceDrift", + f"Sending request for google.cloud.config_v1.ConfigClient.GetTerraformVersion", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResourceDrift", + "rpcName": "GetTerraformVersion", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetResourceDrift._get_response( + response = ConfigRestTransport._GetTerraformVersion._get_response( self._host, metadata, query_params, @@ -4212,21 +5783,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.ResourceDrift() - pb_resp = config.ResourceDrift.pb(resp) + resp = config.TerraformVersion() + pb_resp = config.TerraformVersion.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_resource_drift(resp) + resp = self._interceptor.post_get_terraform_version(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_resource_drift_with_metadata( + resp, _ = self._interceptor.post_get_terraform_version_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.ResourceDrift.to_json(response) + response_payload = config.TerraformVersion.to_json(response) except: response_payload = None http_response = { @@ -4235,19 +5806,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_resource_drift", + "Received response for google.cloud.config_v1.ConfigClient.get_terraform_version", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetResourceDrift", + "rpcName": "GetTerraformVersion", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class 
_GetRevision(_BaseConfigRestTransport._BaseGetRevision, ConfigRestStub): + class _ImportStatefile( + _BaseConfigRestTransport._BaseImportStatefile, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.GetRevision") + return hash("ConfigRestTransport.ImportStatefile") @staticmethod def _get_response( @@ -4268,23 +5841,24 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, ) return response def __call__( self, - request: config.GetRevisionRequest, + request: config.ImportStatefileRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Revision: - r"""Call the get revision method over HTTP. + ) -> config.Statefile: + r"""Call the import statefile method over HTTP. Args: - request (~.config.GetRevisionRequest): - The request object. A request to get a Revision from a - 'GetRevision' call. + request (~.config.ImportStatefileRequest): + The request object. A request to import a state file + passed to a 'ImportStatefile' call. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4294,27 +5868,32 @@ def __call__( be of type `bytes`. Returns: - ~.config.Revision: - A child resource of a Deployment - generated by a 'CreateDeployment' or - 'UpdateDeployment' call. Each Revision - contains metadata pertaining to a - snapshot of a particular Deployment. 
+ ~.config.Statefile: + Contains info about a Terraform state + file """ - http_options = _BaseConfigRestTransport._BaseGetRevision._get_http_options() + http_options = ( + _BaseConfigRestTransport._BaseImportStatefile._get_http_options() + ) - request, metadata = self._interceptor.pre_get_revision(request, metadata) + request, metadata = self._interceptor.pre_import_statefile( + request, metadata + ) transcoded_request = ( - _BaseConfigRestTransport._BaseGetRevision._get_transcoded_request( + _BaseConfigRestTransport._BaseImportStatefile._get_transcoded_request( http_options, request ) ) + body = _BaseConfigRestTransport._BaseImportStatefile._get_request_body_json( + transcoded_request + ) + # Jsonify the query params query_params = ( - _BaseConfigRestTransport._BaseGetRevision._get_query_params_json( + _BaseConfigRestTransport._BaseImportStatefile._get_query_params_json( transcoded_request ) ) @@ -4337,23 +5916,24 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetRevision", + f"Sending request for google.cloud.config_v1.ConfigClient.ImportStatefile", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetRevision", + "rpcName": "ImportStatefile", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetRevision._get_response( + response = ConfigRestTransport._ImportStatefile._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, + body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4362,21 +5942,21 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Revision() - pb_resp = config.Revision.pb(resp) + resp = config.Statefile() + pb_resp = config.Statefile.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = 
self._interceptor.post_get_revision(resp) + resp = self._interceptor.post_import_statefile(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_revision_with_metadata( + resp, _ = self._interceptor.post_import_statefile_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Revision.to_json(response) + response_payload = config.Statefile.to_json(response) except: response_payload = None http_response = { @@ -4385,21 +5965,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_revision", + "Received response for google.cloud.config_v1.ConfigClient.import_statefile", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetRevision", + "rpcName": "ImportStatefile", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _GetTerraformVersion( - _BaseConfigRestTransport._BaseGetTerraformVersion, ConfigRestStub + class _ListDeploymentGroupRevisions( + _BaseConfigRestTransport._BaseListDeploymentGroupRevisions, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.GetTerraformVersion") + return hash("ConfigRestTransport.ListDeploymentGroupRevisions") @staticmethod def _get_response( @@ -4425,47 +6005,45 @@ def _get_response( def __call__( self, - request: config.GetTerraformVersionRequest, + request: config.ListDeploymentGroupRevisionsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.TerraformVersion: - r"""Call the get terraform version method over HTTP. + ) -> config.ListDeploymentGroupRevisionsResponse: + r"""Call the list deployment group + revisions method over HTTP. 
- Args: - request (~.config.GetTerraformVersionRequest): - The request object. The request message for the - GetTerraformVersion method. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.config.ListDeploymentGroupRevisionsRequest): + The request object. The request message for the + ListDeploymentGroupRevisions method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.config.TerraformVersion: - A TerraformVersion represents the - support state the corresponding - Terraform version. + Returns: + ~.config.ListDeploymentGroupRevisionsResponse: + The response message for the + ListDeploymentGroupRevisions method. 
""" - http_options = ( - _BaseConfigRestTransport._BaseGetTerraformVersion._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseListDeploymentGroupRevisions._get_http_options() - request, metadata = self._interceptor.pre_get_terraform_version( + request, metadata = self._interceptor.pre_list_deployment_group_revisions( request, metadata ) - transcoded_request = _BaseConfigRestTransport._BaseGetTerraformVersion._get_transcoded_request( + transcoded_request = _BaseConfigRestTransport._BaseListDeploymentGroupRevisions._get_transcoded_request( http_options, request ) # Jsonify the query params - query_params = _BaseConfigRestTransport._BaseGetTerraformVersion._get_query_params_json( + query_params = _BaseConfigRestTransport._BaseListDeploymentGroupRevisions._get_query_params_json( transcoded_request ) @@ -4487,17 +6065,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.GetTerraformVersion", + f"Sending request for google.cloud.config_v1.ConfigClient.ListDeploymentGroupRevisions", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetTerraformVersion", + "rpcName": "ListDeploymentGroupRevisions", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._GetTerraformVersion._get_response( + response = ConfigRestTransport._ListDeploymentGroupRevisions._get_response( self._host, metadata, query_params, @@ -4512,21 +6090,25 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.TerraformVersion() - pb_resp = config.TerraformVersion.pb(resp) + resp = config.ListDeploymentGroupRevisionsResponse() + pb_resp = config.ListDeploymentGroupRevisionsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_get_terraform_version(resp) + resp = 
self._interceptor.post_list_deployment_group_revisions(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_get_terraform_version_with_metadata( - resp, response_metadata + resp, _ = ( + self._interceptor.post_list_deployment_group_revisions_with_metadata( + resp, response_metadata + ) ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.TerraformVersion.to_json(response) + response_payload = ( + config.ListDeploymentGroupRevisionsResponse.to_json(response) + ) except: response_payload = None http_response = { @@ -4535,21 +6117,21 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.get_terraform_version", + "Received response for google.cloud.config_v1.ConfigClient.list_deployment_group_revisions", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "GetTerraformVersion", + "rpcName": "ListDeploymentGroupRevisions", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _ImportStatefile( - _BaseConfigRestTransport._BaseImportStatefile, ConfigRestStub + class _ListDeploymentGroups( + _BaseConfigRestTransport._BaseListDeploymentGroups, ConfigRestStub ): def __hash__(self): - return hash("ConfigRestTransport.ImportStatefile") + return hash("ConfigRestTransport.ListDeploymentGroups") @staticmethod def _get_response( @@ -4570,24 +6152,23 @@ def _get_response( timeout=timeout, headers=headers, params=rest_helpers.flatten_query_params(query_params, strict=True), - data=body, ) return response def __call__( self, - request: config.ImportStatefileRequest, + request: config.ListDeploymentGroupsRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), - ) -> config.Statefile: - r"""Call the import statefile method over HTTP. 
+ ) -> config.ListDeploymentGroupsResponse: + r"""Call the list deployment groups method over HTTP. Args: - request (~.config.ImportStatefileRequest): - The request object. A request to import a state file - passed to a 'ImportStatefile' call. + request (~.config.ListDeploymentGroupsRequest): + The request object. The request message for the + ListDeploymentGroups method. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. @@ -4597,34 +6178,26 @@ def __call__( be of type `bytes`. Returns: - ~.config.Statefile: - Contains info about a Terraform state - file + ~.config.ListDeploymentGroupsResponse: + The response message for the + ListDeploymentGroups method. """ http_options = ( - _BaseConfigRestTransport._BaseImportStatefile._get_http_options() + _BaseConfigRestTransport._BaseListDeploymentGroups._get_http_options() ) - request, metadata = self._interceptor.pre_import_statefile( + request, metadata = self._interceptor.pre_list_deployment_groups( request, metadata ) - transcoded_request = ( - _BaseConfigRestTransport._BaseImportStatefile._get_transcoded_request( - http_options, request - ) - ) - - body = _BaseConfigRestTransport._BaseImportStatefile._get_request_body_json( - transcoded_request + transcoded_request = _BaseConfigRestTransport._BaseListDeploymentGroups._get_transcoded_request( + http_options, request ) # Jsonify the query params - query_params = ( - _BaseConfigRestTransport._BaseImportStatefile._get_query_params_json( - transcoded_request - ) + query_params = _BaseConfigRestTransport._BaseListDeploymentGroups._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -4645,24 +6218,23 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.ImportStatefile", + f"Sending request for google.cloud.config_v1.ConfigClient.ListDeploymentGroups", extra={ 
"serviceName": "google.cloud.config.v1.Config", - "rpcName": "ImportStatefile", + "rpcName": "ListDeploymentGroups", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._ImportStatefile._get_response( + response = ConfigRestTransport._ListDeploymentGroups._get_response( self._host, metadata, query_params, self._session, timeout, transcoded_request, - body, ) # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception @@ -4671,21 +6243,23 @@ def __call__( raise core_exceptions.from_http_response(response) # Return the response - resp = config.Statefile() - pb_resp = config.Statefile.pb(resp) + resp = config.ListDeploymentGroupsResponse() + pb_resp = config.ListDeploymentGroupsResponse.pb(resp) json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) - resp = self._interceptor.post_import_statefile(resp) + resp = self._interceptor.post_list_deployment_groups(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_import_statefile_with_metadata( + resp, _ = self._interceptor.post_list_deployment_groups_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( logging.DEBUG ): # pragma: NO COVER try: - response_payload = config.Statefile.to_json(response) + response_payload = config.ListDeploymentGroupsResponse.to_json( + response + ) except: response_payload = None http_response = { @@ -4694,10 +6268,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.import_statefile", + "Received response for google.cloud.config_v1.ConfigClient.list_deployment_groups", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ImportStatefile", + "rpcName": "ListDeploymentGroups", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -5747,19 +7321,175 @@ def __call__( 
"status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.list_terraform_versions", + "Received response for google.cloud.config_v1.ConfigClient.list_terraform_versions", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "ListTerraformVersions", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _LockDeployment(_BaseConfigRestTransport._BaseLockDeployment, ConfigRestStub): + def __hash__(self): + return hash("ConfigRestTransport.LockDeployment") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.LockDeploymentRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the lock deployment method over HTTP. + + Args: + request (~.config.LockDeploymentRequest): + The request object. A request to lock a deployment passed + to a 'LockDeployment' call. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseLockDeployment._get_http_options() + ) + + request, metadata = self._interceptor.pre_lock_deployment(request, metadata) + transcoded_request = ( + _BaseConfigRestTransport._BaseLockDeployment._get_transcoded_request( + http_options, request + ) + ) + + body = _BaseConfigRestTransport._BaseLockDeployment._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = ( + _BaseConfigRestTransport._BaseLockDeployment._get_query_params_json( + transcoded_request + ) + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.LockDeployment", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "LockDeployment", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ConfigRestTransport._LockDeployment._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_lock_deployment(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_lock_deployment_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.lock_deployment", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "ListTerraformVersions", + "rpcName": "LockDeployment", "metadata": http_response["headers"], "httpResponse": http_response, }, ) return resp - class _LockDeployment(_BaseConfigRestTransport._BaseLockDeployment, ConfigRestStub): + class _ProvisionDeploymentGroup( + _BaseConfigRestTransport._BaseProvisionDeploymentGroup, ConfigRestStub + ): def __hash__(self): - return hash("ConfigRestTransport.LockDeployment") + return hash("ConfigRestTransport.ProvisionDeploymentGroup") @staticmethod def _get_response( @@ -5786,54 +7516,51 @@ def _get_response( def __call__( self, - request: config.LockDeploymentRequest, + request: config.ProvisionDeploymentGroupRequest, *, retry: OptionalRetry = gapic_v1.method.DEFAULT, timeout: Optional[float] = None, metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), ) -> operations_pb2.Operation: - r"""Call the lock deployment method over HTTP. + r"""Call the provision deployment + group method over HTTP. - Args: - request (~.config.LockDeploymentRequest): - The request object. 
A request to lock a deployment passed - to a 'LockDeployment' call. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be - sent along with the request as metadata. Normally, each value must be of type `str`, - but for metadata keys ending with the suffix `-bin`, the corresponding values must - be of type `bytes`. + Args: + request (~.config.ProvisionDeploymentGroupRequest): + The request object. The request message for the + ProvisionDeploymentGroup method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. - Returns: - ~.operations_pb2.Operation: - This resource represents a - long-running operation that is the - result of a network API call. + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
""" - http_options = ( - _BaseConfigRestTransport._BaseLockDeployment._get_http_options() - ) + http_options = _BaseConfigRestTransport._BaseProvisionDeploymentGroup._get_http_options() - request, metadata = self._interceptor.pre_lock_deployment(request, metadata) - transcoded_request = ( - _BaseConfigRestTransport._BaseLockDeployment._get_transcoded_request( - http_options, request - ) + request, metadata = self._interceptor.pre_provision_deployment_group( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseProvisionDeploymentGroup._get_transcoded_request( + http_options, request ) - body = _BaseConfigRestTransport._BaseLockDeployment._get_request_body_json( + body = _BaseConfigRestTransport._BaseProvisionDeploymentGroup._get_request_body_json( transcoded_request ) # Jsonify the query params - query_params = ( - _BaseConfigRestTransport._BaseLockDeployment._get_query_params_json( - transcoded_request - ) + query_params = _BaseConfigRestTransport._BaseProvisionDeploymentGroup._get_query_params_json( + transcoded_request ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5854,17 +7581,17 @@ def __call__( "headers": dict(metadata), } _LOGGER.debug( - f"Sending request for google.cloud.config_v1.ConfigClient.LockDeployment", + f"Sending request for google.cloud.config_v1.ConfigClient.ProvisionDeploymentGroup", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "LockDeployment", + "rpcName": "ProvisionDeploymentGroup", "httpRequest": http_request, "metadata": http_request["headers"], }, ) # Send the request - response = ConfigRestTransport._LockDeployment._get_response( + response = ConfigRestTransport._ProvisionDeploymentGroup._get_response( self._host, metadata, query_params, @@ -5883,9 +7610,9 @@ def __call__( resp = operations_pb2.Operation() json_format.Parse(response.content, resp, ignore_unknown_fields=True) - resp = self._interceptor.post_lock_deployment(resp) + resp = 
self._interceptor.post_provision_deployment_group(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] - resp, _ = self._interceptor.post_lock_deployment_with_metadata( + resp, _ = self._interceptor.post_provision_deployment_group_with_metadata( resp, response_metadata ) if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( @@ -5901,10 +7628,10 @@ def __call__( "status": response.status_code, } _LOGGER.debug( - "Received response for google.cloud.config_v1.ConfigClient.lock_deployment", + "Received response for google.cloud.config_v1.ConfigClient.provision_deployment_group", extra={ "serviceName": "google.cloud.config.v1.Config", - "rpcName": "LockDeployment", + "rpcName": "ProvisionDeploymentGroup", "metadata": http_response["headers"], "httpResponse": http_response, }, @@ -6383,6 +8110,160 @@ def __call__( ) return resp + class _UpdateDeploymentGroup( + _BaseConfigRestTransport._BaseUpdateDeploymentGroup, ConfigRestStub + ): + def __hash__(self): + return hash("ConfigRestTransport.UpdateDeploymentGroup") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: config.UpdateDeploymentGroupRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update deployment group method over HTTP. + + Args: + request (~.config.UpdateDeploymentGroupRequest): + The request object. 
A request message for updating a + deployment group + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = ( + _BaseConfigRestTransport._BaseUpdateDeploymentGroup._get_http_options() + ) + + request, metadata = self._interceptor.pre_update_deployment_group( + request, metadata + ) + transcoded_request = _BaseConfigRestTransport._BaseUpdateDeploymentGroup._get_transcoded_request( + http_options, request + ) + + body = _BaseConfigRestTransport._BaseUpdateDeploymentGroup._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseConfigRestTransport._BaseUpdateDeploymentGroup._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.cloud.config_v1.ConfigClient.UpdateDeploymentGroup", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "UpdateDeploymentGroup", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = 
ConfigRestTransport._UpdateDeploymentGroup._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_deployment_group(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_deployment_group_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.cloud.config_v1.ConfigClient.update_deployment_group", + extra={ + "serviceName": "google.cloud.config.v1.Config", + "rpcName": "UpdateDeploymentGroup", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + @property def create_deployment( self, @@ -6391,6 +8272,14 @@ def create_deployment( # In C++ this would require a dynamic_cast return self._CreateDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def create_deployment_group( + self, + ) -> Callable[[config.CreateDeploymentGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._CreateDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + @property def create_preview( self, @@ -6407,6 +8296,14 @@ def delete_deployment( # In C++ this would require a dynamic_cast return self._DeleteDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def delete_deployment_group( + self, + ) -> Callable[[config.DeleteDeploymentGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeleteDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + @property def delete_preview( self, @@ -6423,6 +8320,16 @@ def delete_statefile( # In C++ this would require a dynamic_cast return self._DeleteStatefile(self._session, self._host, self._interceptor) # type: ignore + @property + def deprovision_deployment_group( + self, + ) -> Callable[[config.DeprovisionDeploymentGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._DeprovisionDeploymentGroup( + self._session, self._host, self._interceptor + ) # type: ignore + @property def export_deployment_statefile( self, @@ -6479,6 +8386,26 @@ def get_deployment( # In C++ this would require a dynamic_cast return self._GetDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def get_deployment_group( + self, + ) -> Callable[[config.GetDeploymentGroupRequest], config.DeploymentGroup]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._GetDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_deployment_group_revision( + self, + ) -> Callable[ + [config.GetDeploymentGroupRevisionRequest], config.DeploymentGroupRevision + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetDeploymentGroupRevision( + self._session, self._host, self._interceptor + ) # type: ignore + @property def get_preview(self) -> Callable[[config.GetPreviewRequest], config.Preview]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. @@ -6529,6 +8456,29 @@ def import_statefile( # In C++ this would require a dynamic_cast return self._ImportStatefile(self._session, self._host, self._interceptor) # type: ignore + @property + def list_deployment_group_revisions( + self, + ) -> Callable[ + [config.ListDeploymentGroupRevisionsRequest], + config.ListDeploymentGroupRevisionsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListDeploymentGroupRevisions( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def list_deployment_groups( + self, + ) -> Callable[ + [config.ListDeploymentGroupsRequest], config.ListDeploymentGroupsResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._ListDeploymentGroups(self._session, self._host, self._interceptor) # type: ignore + @property def list_deployments( self, @@ -6599,6 +8549,16 @@ def lock_deployment( # In C++ this would require a dynamic_cast return self._LockDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def provision_deployment_group( + self, + ) -> Callable[[config.ProvisionDeploymentGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ProvisionDeploymentGroup( + self._session, self._host, self._interceptor + ) # type: ignore + @property def unlock_deployment( self, @@ -6625,6 +8585,14 @@ def update_deployment( # In C++ this would require a dynamic_cast return self._UpdateDeployment(self._session, self._host, self._interceptor) # type: ignore + @property + def update_deployment_group( + self, + ) -> Callable[[config.UpdateDeploymentGroupRequest], operations_pb2.Operation]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateDeploymentGroup(self._session, self._host, self._interceptor) # type: ignore + @property def get_location(self): return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore diff --git a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py index 0dc1774eae33..71f58c3a92c8 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py +++ b/packages/google-cloud-config/google/cloud/config_v1/services/config/transports/rest_base.py @@ -153,6 +153,65 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseCreateDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "deploymentGroupId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{parent=projects/*/locations/*}/deploymentGroups", + "body": "deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.CreateDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + 
query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseCreateDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseCreatePreview: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -257,6 +316,53 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeleteDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v1/{name=projects/*/locations/*/deploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.DeleteDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseDeleteDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseDeletePreview: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -361,6 +467,63 @@ def 
_get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseDeprovisionDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deploymentGroups/*}:deprovision", + "body": "*", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.DeprovisionDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseDeprovisionDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseExportDeploymentStatefile: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -673,6 +836,100 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseGetDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") 
+ + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deploymentGroups/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.GetDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseGetDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetDeploymentGroupRevision: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{name=projects/*/locations/*/deploymentGroups/*/revisions/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.GetDeploymentGroupRevisionRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params 
= json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseGetDeploymentGroupRevision._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetPreview: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1012,6 +1269,100 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseListDeploymentGroupRevisions: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*/deploymentGroups/*}/revisions", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.ListDeploymentGroupRevisionsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseListDeploymentGroupRevisions._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListDeploymentGroups: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + 
__REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v1/{parent=projects/*/locations/*}/deploymentGroups", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.ListDeploymentGroupsRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseListDeploymentGroups._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseListDeployments: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1398,6 +1749,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseProvisionDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v1/{name=projects/*/locations/*/deploymentGroups/*}:provision", + "body": "*", + }, + ] + return http_options + + @staticmethod + def 
_get_transcoded_request(http_options, request): + pb_request = config.ProvisionDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseProvisionDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseUnlockDeployment: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") @@ -1569,6 +1977,63 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseUpdateDeploymentGroup: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v1/{deployment_group.name=projects/*/locations/*/deploymentGroups/*}", + "body": "deployment_group", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = config.UpdateDeploymentGroupRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def 
_get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseConfigRestTransport._BaseUpdateDeploymentGroup._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + class _BaseGetLocation: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py index e06713bdc121..bcd6513c88f6 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/__init__.py @@ -16,19 +16,32 @@ from .config import ( ApplyResults, AutoMigrationConfig, + CreateDeploymentGroupRequest, CreateDeploymentRequest, CreatePreviewRequest, + DeleteDeploymentGroupRequest, DeleteDeploymentRequest, DeletePreviewRequest, DeleteStatefileRequest, Deployment, + DeploymentGroup, + DeploymentGroupRevision, DeploymentOperationMetadata, + DeploymentOperationSummary, + DeploymentSource, + DeploymentSpec, + DeploymentUnit, + DeploymentUnitProgress, + DeprovisionDeploymentGroupRequest, ExportDeploymentStatefileRequest, ExportLockInfoRequest, ExportPreviewResultRequest, ExportPreviewResultResponse, ExportRevisionStatefileRequest, + ExternalValueSource, GetAutoMigrationConfigRequest, + GetDeploymentGroupRequest, + GetDeploymentGroupRevisionRequest, GetDeploymentRequest, GetPreviewRequest, GetResourceChangeRequest, @@ -38,6 +51,10 @@ GetTerraformVersionRequest, GitSource, ImportStatefileRequest, + 
ListDeploymentGroupRevisionsRequest, + ListDeploymentGroupRevisionsResponse, + ListDeploymentGroupsRequest, + ListDeploymentGroupsResponse, ListDeploymentsRequest, ListDeploymentsResponse, ListPreviewsRequest, @@ -62,6 +79,8 @@ PropertyChange, PropertyDrift, ProviderConfig, + ProvisionDeploymentGroupOperationMetadata, + ProvisionDeploymentGroupRequest, QuotaValidation, Resource, ResourceCAIInfo, @@ -79,25 +98,39 @@ TerraformVersion, UnlockDeploymentRequest, UpdateAutoMigrationConfigRequest, + UpdateDeploymentGroupRequest, UpdateDeploymentRequest, ) __all__ = ( "ApplyResults", "AutoMigrationConfig", + "CreateDeploymentGroupRequest", "CreateDeploymentRequest", "CreatePreviewRequest", + "DeleteDeploymentGroupRequest", "DeleteDeploymentRequest", "DeletePreviewRequest", "DeleteStatefileRequest", "Deployment", + "DeploymentGroup", + "DeploymentGroupRevision", "DeploymentOperationMetadata", + "DeploymentOperationSummary", + "DeploymentSource", + "DeploymentSpec", + "DeploymentUnit", + "DeploymentUnitProgress", + "DeprovisionDeploymentGroupRequest", "ExportDeploymentStatefileRequest", "ExportLockInfoRequest", "ExportPreviewResultRequest", "ExportPreviewResultResponse", "ExportRevisionStatefileRequest", + "ExternalValueSource", "GetAutoMigrationConfigRequest", + "GetDeploymentGroupRequest", + "GetDeploymentGroupRevisionRequest", "GetDeploymentRequest", "GetPreviewRequest", "GetResourceChangeRequest", @@ -107,6 +140,10 @@ "GetTerraformVersionRequest", "GitSource", "ImportStatefileRequest", + "ListDeploymentGroupRevisionsRequest", + "ListDeploymentGroupRevisionsResponse", + "ListDeploymentGroupsRequest", + "ListDeploymentGroupsResponse", "ListDeploymentsRequest", "ListDeploymentsResponse", "ListPreviewsRequest", @@ -131,6 +168,8 @@ "PropertyChange", "PropertyDrift", "ProviderConfig", + "ProvisionDeploymentGroupOperationMetadata", + "ProvisionDeploymentGroupRequest", "Resource", "ResourceCAIInfo", "ResourceChange", @@ -147,6 +186,7 @@ "TerraformVersion", 
"UnlockDeploymentRequest", "UpdateAutoMigrationConfigRequest", + "UpdateDeploymentGroupRequest", "UpdateDeploymentRequest", "QuotaValidation", ) diff --git a/packages/google-cloud-config/google/cloud/config_v1/types/config.py b/packages/google-cloud-config/google/cloud/config_v1/types/config.py index 6e7b92dfcd28..c03c6b18dfac 100644 --- a/packages/google-cloud-config/google/cloud/config_v1/types/config.py +++ b/packages/google-cloud-config/google/cloud/config_v1/types/config.py @@ -30,6 +30,8 @@ "Deployment", "TerraformBlueprint", "TerraformVariable", + "ExternalValueSource", + "DeploymentSource", "ApplyResults", "TerraformOutput", "ListDeploymentsRequest", @@ -39,6 +41,9 @@ "ListRevisionsResponse", "GetRevisionRequest", "CreateDeploymentRequest", + "CreateDeploymentGroupRequest", + "UpdateDeploymentGroupRequest", + "DeleteDeploymentGroupRequest", "UpdateDeploymentRequest", "DeleteDeploymentRequest", "OperationMetadata", @@ -92,6 +97,21 @@ "GetAutoMigrationConfigRequest", "AutoMigrationConfig", "UpdateAutoMigrationConfigRequest", + "DeploymentGroup", + "DeploymentUnit", + "DeploymentSpec", + "GetDeploymentGroupRequest", + "ListDeploymentGroupsRequest", + "ListDeploymentGroupsResponse", + "ProvisionDeploymentGroupRequest", + "DeprovisionDeploymentGroupRequest", + "DeploymentOperationSummary", + "DeploymentUnitProgress", + "ProvisionDeploymentGroupOperationMetadata", + "DeploymentGroupRevision", + "GetDeploymentGroupRevisionRequest", + "ListDeploymentGroupRevisionsRequest", + "ListDeploymentGroupRevisionsResponse", }, ) @@ -492,6 +512,10 @@ class TerraformBlueprint(proto.Message): input_values (MutableMapping[str, google.cloud.config_v1.types.TerraformVariable]): Optional. Input variable values for the Terraform blueprint. + external_values (MutableMapping[str, google.cloud.config_v1.types.ExternalValueSource]): + Optional. Map of input variable names in this + blueprint to configurations for importing values + from external sources. 
""" gcs_source: str = proto.Field( @@ -511,6 +535,12 @@ class TerraformBlueprint(proto.Message): number=4, message="TerraformVariable", ) + external_values: MutableMapping[str, "ExternalValueSource"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=5, + message="ExternalValueSource", + ) class TerraformVariable(proto.Message): @@ -528,6 +558,53 @@ class TerraformVariable(proto.Message): ) +class ExternalValueSource(proto.Message): + r"""Configuration for a source of an external value. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + deployment_source (google.cloud.config_v1.types.DeploymentSource): + A source from a Deployment. + + This field is a member of `oneof`_ ``source``. + """ + + deployment_source: "DeploymentSource" = proto.Field( + proto.MESSAGE, + number=1, + oneof="source", + message="DeploymentSource", + ) + + +class DeploymentSource(proto.Message): + r"""Configuration for a value sourced from a Deployment. + + Attributes: + deployment (str): + Required. The resource name of the source + Deployment to import the output from. Format: + + projects/{project}/locations/{location}/deployments/{deployment} + The source deployment must be in the same + project and location. + output_name (str): + Required. The name of the output variable in + the source deployment's latest successfully + applied revision. + """ + + deployment: str = proto.Field( + proto.STRING, + number=1, + ) + output_name: str = proto.Field( + proto.STRING, + number=2, + ) + + class ApplyResults(proto.Message): r"""Outputs and artifacts from applying a deployment. @@ -865,6 +942,205 @@ class CreateDeploymentRequest(proto.Message): ) +class CreateDeploymentGroupRequest(proto.Message): + r"""A request to create a deployment group + + Attributes: + parent (str): + Required. The parent in whose context the Deployment Group + is created. 
The parent value is in the format: + 'projects/{project_id}/locations/{location}' + deployment_group_id (str): + Required. The deployment group ID. + deployment_group (google.cloud.config_v1.types.DeploymentGroup): + Required. [Deployment Group][] resource to create + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + deployment_group_id: str = proto.Field( + proto.STRING, + number=2, + ) + deployment_group: "DeploymentGroup" = proto.Field( + proto.MESSAGE, + number=3, + message="DeploymentGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=4, + ) + + +class UpdateDeploymentGroupRequest(proto.Message): + r"""A request message for updating a deployment group + + Attributes: + update_mask (google.protobuf.field_mask_pb2.FieldMask): + Optional. Field mask used to specify the fields to be + overwritten in the Deployment Group resource by the update. + + The fields specified in the update_mask are relative to the + resource, not the full request. A field will be overwritten + if it is in the mask. If the user does not provide a mask + then all fields will be overwritten. 
+ deployment_group (google.cloud.config_v1.types.DeploymentGroup): + Required. + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] to + update. + + The deployment group's ``name`` field is used to identify + the resource to be updated. Format: + ``projects/{project}/locations/{location}/deploymentGroups/{deployment_group_id}`` + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. The server will guarantee that + for at least 60 minutes since the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + """ + + update_mask: field_mask_pb2.FieldMask = proto.Field( + proto.MESSAGE, + number=1, + message=field_mask_pb2.FieldMask, + ) + deployment_group: "DeploymentGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="DeploymentGroup", + ) + request_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class DeleteDeploymentGroupRequest(proto.Message): + r"""Request message for Delete DeploymentGroup + + Attributes: + name (str): + Required. The name of DeploymentGroup in the format + projects/{project_id}/locations/{location_id}/deploymentGroups/{deploymentGroup} + request_id (str): + Optional. An optional request ID to identify + requests. Specify a unique request ID so that if + you must retry your request, the server will + know to ignore the request if it has already + been completed. 
The server will guarantee that + for at least 60 minutes after the first request. + + For example, consider a situation where you make + an initial request and the request times out. If + you make the request again with the same request + ID, the server can check if original operation + with the same request ID was received, and if + so, will ignore the second request. This + prevents clients from accidentally creating + duplicate commitments. + + The request ID must be a valid UUID with the + exception that zero UUID is not supported + (00000000-0000-0000-0000-000000000000). + force (bool): + Optional. If set to true, any revisions for + this deployment group will also be deleted. + (Otherwise, the request will only work if the + deployment group has no revisions.) + deployment_reference_policy (google.cloud.config_v1.types.DeleteDeploymentGroupRequest.DeploymentReferencePolicy): + Optional. Policy on how to handle referenced deployments + when deleting the DeploymentGroup. If unspecified, the + default behavior is to fail the deletion if any deployments + currently referenced in the ``deployment_units`` of the + DeploymentGroup or in the latest revision are not deleted. + """ + + class DeploymentReferencePolicy(proto.Enum): + r"""Policy on how to handle referenced deployments when deleting + the DeploymentGroup. + + Values: + DEPLOYMENT_REFERENCE_POLICY_UNSPECIFIED (0): + The default behavior. If unspecified, the system will act as + if ``FAIL_IF_ANY_REFERENCES_EXIST`` is specified. + FAIL_IF_ANY_REFERENCES_EXIST (1): + Fail the deletion if any deployments currently referenced in + the ``deployment_units`` of the DeploymentGroup or in the + latest revision are not deleted. + FAIL_IF_METADATA_REFERENCES_EXIST (2): + Fail the deletion only if any deployments currently + referenced in the ``deployment_units`` of the + DeploymentGroup are not deleted. The deletion will proceed + even if the deployments in the latest revision of the + DeploymentGroup are not deleted. 
+ IGNORE_DEPLOYMENT_REFERENCES (3): + Ignore any deployments currently referenced in the + ``deployment_units`` of the DeploymentGroup or in the latest + revision. + """ + + DEPLOYMENT_REFERENCE_POLICY_UNSPECIFIED = 0 + FAIL_IF_ANY_REFERENCES_EXIST = 1 + FAIL_IF_METADATA_REFERENCES_EXIST = 2 + IGNORE_DEPLOYMENT_REFERENCES = 3 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + request_id: str = proto.Field( + proto.STRING, + number=2, + ) + force: bool = proto.Field( + proto.BOOL, + number=3, + ) + deployment_reference_policy: DeploymentReferencePolicy = proto.Field( + proto.ENUM, + number=4, + enum=DeploymentReferencePolicy, + ) + + class UpdateDeploymentRequest(proto.Message): r""" @@ -1019,6 +1295,11 @@ class OperationMetadata(proto.Message): Output only. Metadata about the preview operation state. + This field is a member of `oneof`_ ``resource_metadata``. + provision_deployment_group_metadata (google.cloud.config_v1.types.ProvisionDeploymentGroupOperationMetadata): + Output only. Metadata about + ProvisionDeploymentGroup operation state. + This field is a member of `oneof`_ ``resource_metadata``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. Time when the operation was @@ -1060,6 +1341,14 @@ class OperationMetadata(proto.Message): oneof="resource_metadata", message="PreviewOperationMetadata", ) + provision_deployment_group_metadata: "ProvisionDeploymentGroupOperationMetadata" = ( + proto.Field( + proto.MESSAGE, + number=10, + oneof="resource_metadata", + message="ProvisionDeploymentGroupOperationMetadata", + ) + ) create_time: timestamp_pb2.Timestamp = proto.Field( proto.MESSAGE, number=1, @@ -3416,4 +3705,795 @@ class UpdateAutoMigrationConfigRequest(proto.Message): ) +class DeploymentGroup(proto.Message): + r"""A DeploymentGroup is a collection of DeploymentUnits that in + a DAG-like structure. + + Attributes: + name (str): + Identifier. The name of the deployment group. 
Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the deployment group + was created. + update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the deployment group + was last updated. + labels (MutableMapping[str, str]): + Optional. User-defined metadata for the + deployment group. + annotations (MutableMapping[str, str]): + Optional. Arbitrary key-value metadata + storage e.g. to help client tools identify + deployment group during automation. See + https://google.aip.dev/148#annotations for + details on format and size limitations. + state (google.cloud.config_v1.types.DeploymentGroup.State): + Output only. Current state of the deployment + group. + state_description (str): + Output only. Additional information regarding + the current state. + deployment_units (MutableSequence[google.cloud.config_v1.types.DeploymentUnit]): + The deployment units of the deployment group + in a DAG like structure. When a deployment group + is being provisioned, the deployment units are + deployed in a DAG order. + The provided units must be in a DAG order, + otherwise an error will be returned. + provisioning_state (google.cloud.config_v1.types.DeploymentGroup.ProvisioningState): + Output only. The provisioning state of the + deployment group. + provisioning_state_description (str): + Output only. Additional information regarding + the current provisioning state. + provisioning_error (google.rpc.status_pb2.Status): + Output only. The error status of the + deployment group provisioning or deprovisioning. + """ + + class State(proto.Enum): + r"""Possible states of a deployment group. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is used if the + state is omitted. + CREATING (1): + The deployment group is being created. + ACTIVE (2): + The deployment group is healthy. 
+ UPDATING (3): + The deployment group is being updated. + DELETING (4): + The deployment group is being deleted. + FAILED (5): + The deployment group has encountered an + unexpected error. + SUSPENDED (6): + The deployment group is no longer being + actively reconciled. This may be the result of + recovering the project after deletion. + DELETED (7): + The deployment group has been deleted. + """ + + STATE_UNSPECIFIED = 0 + CREATING = 1 + ACTIVE = 2 + UPDATING = 3 + DELETING = 4 + FAILED = 5 + SUSPENDED = 6 + DELETED = 7 + + class ProvisioningState(proto.Enum): + r"""Possible provisioning states of a deployment group. + + Values: + PROVISIONING_STATE_UNSPECIFIED (0): + Unspecified provisioning state. + PROVISIONING (1): + The deployment group is being provisioned. + PROVISIONED (2): + The deployment group is provisioned. + FAILED_TO_PROVISION (3): + The deployment group failed to be + provisioned. + DEPROVISIONING (4): + The deployment group is being deprovisioned. + DEPROVISIONED (5): + The deployment group is deprovisioned. + FAILED_TO_DEPROVISION (6): + The deployment group failed to be + deprovisioned. 
+ """ + + PROVISIONING_STATE_UNSPECIFIED = 0 + PROVISIONING = 1 + PROVISIONED = 2 + FAILED_TO_PROVISION = 3 + DEPROVISIONING = 4 + DEPROVISIONED = 5 + FAILED_TO_DEPROVISION = 6 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + state: State = proto.Field( + proto.ENUM, + number=6, + enum=State, + ) + state_description: str = proto.Field( + proto.STRING, + number=7, + ) + deployment_units: MutableSequence["DeploymentUnit"] = proto.RepeatedField( + proto.MESSAGE, + number=8, + message="DeploymentUnit", + ) + provisioning_state: ProvisioningState = proto.Field( + proto.ENUM, + number=9, + enum=ProvisioningState, + ) + provisioning_state_description: str = proto.Field( + proto.STRING, + number=10, + ) + provisioning_error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=11, + message=status_pb2.Status, + ) + + +class DeploymentUnit(proto.Message): + r"""A DeploymentUnit is a container for a deployment and its + dependencies. An existing deployment can be provided directly in the + unit, or the unit can act as a placeholder to define the DAG, with + the deployment specs supplied in a ``provisionDeploymentRequest``. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + id (str): + The id of the deployment unit. Must be unique + within the deployment group. + deployment (str): + Optional. The name of the deployment to be provisioned. + Format: + 'projects/{project_id}/locations/{location}/deployments/{deployment}'. 
+ + This field is a member of `oneof`_ ``_deployment``. + dependencies (MutableSequence[str]): + Required. The IDs of the deployment units + within the deployment group that this unit + depends on. + """ + + id: str = proto.Field( + proto.STRING, + number=1, + ) + deployment: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + dependencies: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class DeploymentSpec(proto.Message): + r"""Spec for a deployment to be created. + + Attributes: + deployment_id (str): + Required. The id of the deployment to be + created which doesn't include the project id and + location. + deployment (google.cloud.config_v1.types.Deployment): + Required. The deployment to be created. + """ + + deployment_id: str = proto.Field( + proto.STRING, + number=1, + ) + deployment: "Deployment" = proto.Field( + proto.MESSAGE, + number=2, + message="Deployment", + ) + + +class GetDeploymentGroupRequest(proto.Message): + r"""The request message for the GetDeploymentGroup method. + + Attributes: + name (str): + Required. The name of the deployment group to retrieve. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeploymentGroupsRequest(proto.Message): + r"""The request message for the ListDeploymentGroups method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployment groups. Format: + 'projects/{project_id}/locations/{location}'. + page_size (int): + Optional. When requesting a page of resources, 'page_size' + specifies number of resources to return. If unspecified, at + most 500 will be returned. The maximum value is 1000. + page_token (str): + Optional. Token returned by previous call to + 'ListDeploymentGroups' which specifies the + position in the list from where to continue + listing the deployment groups. 
+ filter (str): + Optional. Lists the DeploymentGroups that match the filter + expression. A filter expression filters the deployment + groups listed in the response. The expression must be of the + form '{field} {operator} {value}' where operators: '<', '>', + '<=', '>=', '!=', '=', ':' are supported (colon ':' + represents a HAS operator which is roughly synonymous with + equality). {field} can refer to a proto or JSON field, or a + synthetic field. Field names can be camelCase or snake_case. + + Examples: + + - Filter by name: name = + "projects/foo/locations/us-central1/deploymentGroups/bar" + + - Filter by labels: + + - Resources that have a key called 'foo' labels.foo:\* + - Resources that have a key called 'foo' whose value is + 'bar' labels.foo = bar + + - Filter by state: + + - DeploymentGroups in CREATING state. state=CREATING + order_by (str): + Optional. Field to use to sort the list. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class ListDeploymentGroupsResponse(proto.Message): + r"""The response message for the ListDeploymentGroups method. + + Attributes: + deployment_groups (MutableSequence[google.cloud.config_v1.types.DeploymentGroup]): + The deployment groups from the specified + collection. + next_page_token (str): + Token to be supplied to the next ListDeploymentGroups + request via ``page_token`` to obtain the next set of + results. + unreachable (MutableSequence[str]): + Locations that could not be reached. 
+ """ + + @property + def raw_page(self): + return self + + deployment_groups: MutableSequence["DeploymentGroup"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DeploymentGroup", + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class ProvisionDeploymentGroupRequest(proto.Message): + r"""The request message for the ProvisionDeploymentGroup method. + + Attributes: + name (str): + Required. The name of the deployment group to provision. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + deployment_specs (MutableMapping[str, google.cloud.config_v1.types.DeploymentSpec]): + Optional. The deployment specs of the deployment units to be + created within the same project and location of the + deployment group. The key is the unit ID, and the value is + the ``DeploymentSpec``. Provisioning will fail if a + ``deployment_spec`` has a ``deployment_id`` that matches an + existing deployment in the same project and location. If an + existing deployment was part of the last successful revision + but is no longer in the current DeploymentGroup's + ``deployment_units``, it will be recreated if included in + ``deployment_specs``. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + deployment_specs: MutableMapping[str, "DeploymentSpec"] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=3, + message="DeploymentSpec", + ) + + +class DeprovisionDeploymentGroupRequest(proto.Message): + r"""The request message for the DeprovisionDeploymentGroup + method. + + Attributes: + name (str): + Required. The name of the deployment group to deprovision. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + force (bool): + Optional. If set to true, this option is + propagated to the deletion of each deployment in + the group. 
This corresponds to the 'force' field + in DeleteDeploymentRequest. + delete_policy (google.cloud.config_v1.types.DeleteDeploymentRequest.DeletePolicy): + Optional. Policy on how resources within each deployment + should be handled during deletion. This policy is applied + globally to the deletion of all deployments in this group. + This corresponds to the 'delete_policy' field in + DeleteDeploymentRequest. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + force: bool = proto.Field( + proto.BOOL, + number=2, + ) + delete_policy: "DeleteDeploymentRequest.DeletePolicy" = proto.Field( + proto.ENUM, + number=3, + enum="DeleteDeploymentRequest.DeletePolicy", + ) + + +class DeploymentOperationSummary(proto.Message): + r"""The summary of the deployment operation. + + Attributes: + deployment_step (google.cloud.config_v1.types.DeploymentOperationMetadata.DeploymentStep): + Output only. The current step the deployment + operation is running. + build (str): + Output only. Cloud Build instance UUID + associated with this operation. + logs (str): + Output only. Location of Deployment operations logs in + ``gs://{bucket}/{object}`` format. + content (str): + Output only. Location of Deployment operations content in + ``gs://{bucket}/{object}`` format. + artifacts (str): + Output only. Location of Deployment operations artifacts in + ``gs://{bucket}/{object}`` format. + """ + + deployment_step: "DeploymentOperationMetadata.DeploymentStep" = proto.Field( + proto.ENUM, + number=1, + enum="DeploymentOperationMetadata.DeploymentStep", + ) + build: str = proto.Field( + proto.STRING, + number=2, + ) + logs: str = proto.Field( + proto.STRING, + number=3, + ) + content: str = proto.Field( + proto.STRING, + number=4, + ) + artifacts: str = proto.Field( + proto.STRING, + number=5, + ) + + +class DeploymentUnitProgress(proto.Message): + r"""The progress of a deployment unit provisioning or + deprovisioning. + + Attributes: + unit_id (str): + Output only. 
The unit id of the deployment + unit to be provisioned. + deployment (str): + Output only. The name of the deployment to be + provisioned. Format: + + 'projects/{project}/locations/{location}/deployments/{deployment}'. + state (google.cloud.config_v1.types.DeploymentUnitProgress.State): + Output only. The current step of the + deployment unit provisioning. + state_description (str): + Output only. Additional information regarding + the current state. + deployment_operation_summary (google.cloud.config_v1.types.DeploymentOperationSummary): + Output only. The summary of the deployment + operation. + error (google.rpc.status_pb2.Status): + Output only. Holds the error status of the + deployment unit provisioning. + intent (google.cloud.config_v1.types.DeploymentUnitProgress.Intent): + Output only. The intent of the deployment + unit. + """ + + class State(proto.Enum): + r"""The possible steps a deployment unit provisioning may be + running. + + Values: + STATE_UNSPECIFIED (0): + The default value. This value is unused. + QUEUED (1): + The deployment unit is queued for deployment + creation or update. + APPLYING_DEPLOYMENT (2): + The underlying deployment of the unit is + being created or updated. + SUCCEEDED (4): + The underlying deployment operation of the + unit has succeeded. + FAILED (5): + The underlying deployment operation of the + unit has failed. + ABORTED (6): + The deployment unit was aborted, likely due + to failures in other dependent deployment units. + SKIPPED (7): + The deployment unit was skipped because there + were no changes to apply. + DELETING_DEPLOYMENT (8): + The deployment is being deleted. + PREVIEWING_DEPLOYMENT (9): + The deployment is being previewed. + """ + + STATE_UNSPECIFIED = 0 + QUEUED = 1 + APPLYING_DEPLOYMENT = 2 + SUCCEEDED = 4 + FAILED = 5 + ABORTED = 6 + SKIPPED = 7 + DELETING_DEPLOYMENT = 8 + PREVIEWING_DEPLOYMENT = 9 + + class Intent(proto.Enum): + r"""The possible intents of a deployment unit. 
+ + Values: + INTENT_UNSPECIFIED (0): + Unspecified intent. + CREATE_DEPLOYMENT (1): + Create deployment in the unit from the + deployment spec. + UPDATE_DEPLOYMENT (2): + Update deployment in the unit. + DELETE_DEPLOYMENT (3): + Delete deployment in the unit. + RECREATE_DEPLOYMENT (4): + Recreate deployment in the unit. + CLEAN_UP (5): + Delete deployment in latest successful + revision while no longer referenced in any + deployment unit in the current deployment group. + UNCHANGED (6): + Expected to be unchanged. + """ + + INTENT_UNSPECIFIED = 0 + CREATE_DEPLOYMENT = 1 + UPDATE_DEPLOYMENT = 2 + DELETE_DEPLOYMENT = 3 + RECREATE_DEPLOYMENT = 4 + CLEAN_UP = 5 + UNCHANGED = 6 + + unit_id: str = proto.Field( + proto.STRING, + number=1, + ) + deployment: str = proto.Field( + proto.STRING, + number=2, + ) + state: State = proto.Field( + proto.ENUM, + number=3, + enum=State, + ) + state_description: str = proto.Field( + proto.STRING, + number=4, + ) + deployment_operation_summary: "DeploymentOperationSummary" = proto.Field( + proto.MESSAGE, + number=5, + message="DeploymentOperationSummary", + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=6, + message=status_pb2.Status, + ) + intent: Intent = proto.Field( + proto.ENUM, + number=7, + enum=Intent, + ) + + +class ProvisionDeploymentGroupOperationMetadata(proto.Message): + r"""Operation metadata for ``ProvisionDeploymentGroup`` and + ``DeprovisionDeploymentGroup`` long-running operations. + + Attributes: + step (google.cloud.config_v1.types.ProvisionDeploymentGroupOperationMetadata.ProvisionDeploymentGroupStep): + Output only. The current step of the + deployment group operation. + deployment_unit_progresses (MutableSequence[google.cloud.config_v1.types.DeploymentUnitProgress]): + Output only. Progress information for each + deployment unit within the operation. 
+ """ + + class ProvisionDeploymentGroupStep(proto.Enum): + r"""Possible steps during a deployment group provisioning or + deprovisioning operation. + + Values: + PROVISION_DEPLOYMENT_GROUP_STEP_UNSPECIFIED (0): + Unspecified step. + VALIDATING_DEPLOYMENT_GROUP (1): + Validating the deployment group. + ASSOCIATING_DEPLOYMENTS_TO_DEPLOYMENT_GROUP (2): + Locking the deployments to the deployment + group for atomic actuation. + PROVISIONING_DEPLOYMENT_UNITS (3): + Provisioning the deployment units. + DISASSOCIATING_DEPLOYMENTS_FROM_DEPLOYMENT_GROUP (4): + Unlocking the deployments from the deployment + group after actuation. + SUCCEEDED (5): + The operation has succeeded. + FAILED (6): + The operation has failed. + DEPROVISIONING_DEPLOYMENT_UNITS (7): + Deprovisioning the deployment units. + """ + + PROVISION_DEPLOYMENT_GROUP_STEP_UNSPECIFIED = 0 + VALIDATING_DEPLOYMENT_GROUP = 1 + ASSOCIATING_DEPLOYMENTS_TO_DEPLOYMENT_GROUP = 2 + PROVISIONING_DEPLOYMENT_UNITS = 3 + DISASSOCIATING_DEPLOYMENTS_FROM_DEPLOYMENT_GROUP = 4 + SUCCEEDED = 5 + FAILED = 6 + DEPROVISIONING_DEPLOYMENT_UNITS = 7 + + step: ProvisionDeploymentGroupStep = proto.Field( + proto.ENUM, + number=1, + enum=ProvisionDeploymentGroupStep, + ) + deployment_unit_progresses: MutableSequence["DeploymentUnitProgress"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=2, + message="DeploymentUnitProgress", + ) + ) + + +class DeploymentGroupRevision(proto.Message): + r"""A DeploymentGroupRevision represents a snapshot of a + [DeploymentGroup][google.cloud.config.v1.DeploymentGroup] at a given + point in time, created when a DeploymentGroup is provisioned or + deprovisioned. + + Attributes: + name (str): + Identifier. The name of the deployment group revision. + Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}'. + snapshot (google.cloud.config_v1.types.DeploymentGroup): + Output only. The snapshot of the deployment + group at this revision. 
+ create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Time when the deployment group + revision was created. + alternative_ids (MutableSequence[str]): + Output only. The alternative IDs of the + deployment group revision. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + snapshot: "DeploymentGroup" = proto.Field( + proto.MESSAGE, + number=2, + message="DeploymentGroup", + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=3, + message=timestamp_pb2.Timestamp, + ) + alternative_ids: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + + +class GetDeploymentGroupRevisionRequest(proto.Message): + r"""The request message for the GetDeploymentGroupRevision + method. + + Attributes: + name (str): + Required. The name of the deployment group revision to + retrieve. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}'. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class ListDeploymentGroupRevisionsRequest(proto.Message): + r"""The request message for the ListDeploymentGroupRevisions + method. + + Attributes: + parent (str): + Required. The parent, which owns this collection of + deployment group revisions. Format: + 'projects/{project_id}/locations/{location}/deploymentGroups/{deployment_group}'. + page_size (int): + Optional. When requesting a page of resources, 'page_size' + specifies number of resources to return. If unspecified, a + sensible default will be used by the server. The maximum + value is 1000; values above 1000 will be coerced to 1000. + page_token (str): + Optional. Token returned by previous call to + 'ListDeploymentGroupRevisions' which specifies the position + in the list from where to continue listing the deployment + group revisions. All other parameters provided to + ``ListDeploymentGroupRevisions`` must match the call that + provided the page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListDeploymentGroupRevisionsResponse(proto.Message): + r"""The response message for the ListDeploymentGroupRevisions + method. + + Attributes: + deployment_group_revisions (MutableSequence[google.cloud.config_v1.types.DeploymentGroupRevision]): + The deployment group revisions from the + specified collection. + next_page_token (str): + Token to be supplied to the next + ListDeploymentGroupRevisions request via ``page_token`` to + obtain the next set of results. + unreachable (MutableSequence[str]): + Unordered list. Locations that could not be + reached. + """ + + @property + def raw_page(self): + return self + + deployment_group_revisions: MutableSequence["DeploymentGroupRevision"] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message="DeploymentGroupRevision", + ) + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_async.py new file mode 100644 index 000000000000..09a4a65bcb99 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreateDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_create_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) + + # Make the request + operation = client.create_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_CreateDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_sync.py new file mode 100644 index 000000000000..4c14fcf32a50 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_create_deployment_group_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_CreateDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_create_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) + + # Make the request + operation = client.create_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_CreateDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_async.py new file mode 100644 index 000000000000..a6eee7ec2c07 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_delete_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_DeleteDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_sync.py new file mode 100644 index 000000000000..287e6f377067 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_delete_deployment_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeleteDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_delete_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeleteDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_DeleteDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_async.py new file mode 100644 index 000000000000..66c61f13ba6b --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeprovisionDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeprovisionDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_deprovision_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.deprovision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_DeprovisionDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_sync.py new file mode 100644 index 000000000000..74906d772d31 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_deprovision_deployment_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeprovisionDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_DeprovisionDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_deprovision_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.deprovision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_DeprovisionDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_async.py new file mode 100644 index 000000000000..5a7394a7eb72 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_async.py new file mode 100644 index 000000000000..d77c5250835e --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentGroupRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeploymentGroupRevision_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_get_deployment_group_revision(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + # Make the request + response = await client.get_deployment_group_revision(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetDeploymentGroupRevision_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_sync.py new file mode 100644 index 000000000000..383f6d708d6d --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_revision_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentGroupRevision +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeploymentGroupRevision_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_deployment_group_revision(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_group_revision(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetDeploymentGroupRevision_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_sync.py new file mode 100644 index 000000000000..19b6c4c98f31 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_get_deployment_group_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_GetDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_get_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.GetDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + response = client.get_deployment_group(request=request) + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_GetDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_async.py new file mode 100644 index 000000000000..c69c853504dc --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentGroupRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeploymentGroupRevisions_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_deployment_group_revisions(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_group_revisions(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END config_v1_generated_Config_ListDeploymentGroupRevisions_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_sync.py new file mode 100644 index 000000000000..d0343a56d6be --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_group_revisions_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentGroupRevisions +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeploymentGroupRevisions_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_deployment_group_revisions(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_group_revisions(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END config_v1_generated_Config_ListDeploymentGroupRevisions_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_async.py new file mode 100644 index 000000000000..9b390df027fc --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeploymentGroups_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_list_deployment_groups(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_groups(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END config_v1_generated_Config_ListDeploymentGroups_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_sync.py new file mode 100644 index 000000000000..e0b2901bc6af --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_list_deployment_groups_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListDeploymentGroups +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ListDeploymentGroups_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_list_deployment_groups(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ListDeploymentGroupsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_deployment_groups(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END config_v1_generated_Config_ListDeploymentGroups_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_async.py new file mode 100644 index 000000000000..7fe96855bf13 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ProvisionDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_provision_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.provision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_ProvisionDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_sync.py new file mode 100644 index 000000000000..02758b9f0037 --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_provision_deployment_group_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ProvisionDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_ProvisionDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_provision_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + # Make the request + operation = client.provision_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_ProvisionDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_async.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_async.py new file mode 100644 index 000000000000..02fff367a67c --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UpdateDeploymentGroup_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +async def sample_update_deployment_group(): + # Create a client + client = config_v1.ConfigAsyncClient() + + # Initialize request argument(s) + request = config_v1.UpdateDeploymentGroupRequest() + + # Make the request + operation = client.update_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_UpdateDeploymentGroup_async] diff --git a/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_sync.py b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_sync.py new file mode 100644 index 000000000000..61fe63a760be --- /dev/null +++ b/packages/google-cloud-config/samples/generated_samples/config_v1_generated_config_update_deployment_group_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateDeploymentGroup +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-config + + +# [START config_v1_generated_Config_UpdateDeploymentGroup_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import config_v1 + + +def sample_update_deployment_group(): + # Create a client + client = config_v1.ConfigClient() + + # Initialize request argument(s) + request = config_v1.UpdateDeploymentGroupRequest() + + # Make the request + operation = client.update_deployment_group(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END config_v1_generated_Config_UpdateDeploymentGroup_sync] diff --git a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json index ff7c1b9c2acd..0d1e734fadc6 100644 --- a/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json +++ b/packages/google-cloud-config/samples/generated_samples/snippet_metadata_google.cloud.config.v1.json @@ -11,6 +11,183 @@ "version": "0.5.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.create_deployment_group", + "method": { + "fullName": "google.cloud.config.v1.Config.CreateDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreateDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreateDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment_group", + "type": "google.cloud.config_v1.types.DeploymentGroup" + }, + { + "name": "deployment_group_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_deployment_group" + }, + "description": "Sample for CreateDeploymentGroup", + "file": "config_v1_generated_config_create_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreateDeploymentGroup_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.create_deployment_group", + "method": { + "fullName": "google.cloud.config.v1.Config.CreateDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "CreateDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.CreateDeploymentGroupRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "deployment_group", + "type": "google.cloud.config_v1.types.DeploymentGroup" + }, + { + "name": "deployment_group_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" 
+ } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_deployment_group" + }, + "description": "Sample for CreateDeploymentGroup", + "file": "config_v1_generated_config_create_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_CreateDeploymentGroup_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_create_deployment_group_sync.py" + }, { "canonical": true, "clientMethod": { @@ -365,19 +542,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_deployment", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_deployment_group", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteDeployment", + "fullName": "google.cloud.config.v1.Config.DeleteDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteDeployment" + "shortName": "DeleteDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" + "type": "google.cloud.config_v1.types.DeleteDeploymentGroupRequest" }, { "name": "name", @@ -397,13 +574,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_deployment" + "shortName": "delete_deployment_group" }, - "description": "Sample for DeleteDeployment", - "file": "config_v1_generated_config_delete_deployment_async.py", + "description": "Sample for DeleteDeploymentGroup", + "file": 
"config_v1_generated_config_delete_deployment_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteDeployment_async", + "regionTag": "config_v1_generated_Config_DeleteDeploymentGroup_async", "segments": [ { "end": 55, @@ -436,7 +613,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_deployment_async.py" + "title": "config_v1_generated_config_delete_deployment_group_async.py" }, { "canonical": true, @@ -445,19 +622,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.delete_deployment", + "fullName": "google.cloud.config_v1.ConfigClient.delete_deployment_group", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteDeployment", + "fullName": "google.cloud.config.v1.Config.DeleteDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteDeployment" + "shortName": "DeleteDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" + "type": "google.cloud.config_v1.types.DeleteDeploymentGroupRequest" }, { "name": "name", @@ -477,13 +654,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_deployment" + "shortName": "delete_deployment_group" }, - "description": "Sample for DeleteDeployment", - "file": "config_v1_generated_config_delete_deployment_sync.py", + "description": "Sample for DeleteDeploymentGroup", + "file": "config_v1_generated_config_delete_deployment_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteDeployment_sync", + "regionTag": "config_v1_generated_Config_DeleteDeploymentGroup_sync", "segments": [ { "end": 55, @@ -516,7 +693,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_deployment_sync.py" + "title": 
"config_v1_generated_config_delete_deployment_group_sync.py" }, { "canonical": true, @@ -526,19 +703,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_preview", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.DeletePreview", + "fullName": "google.cloud.config.v1.Config.DeleteDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeletePreview" + "shortName": "DeleteDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeletePreviewRequest" + "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" }, { "name": "name", @@ -558,13 +735,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "delete_preview" + "shortName": "delete_deployment" }, - "description": "Sample for DeletePreview", - "file": "config_v1_generated_config_delete_preview_async.py", + "description": "Sample for DeleteDeployment", + "file": "config_v1_generated_config_delete_deployment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeletePreview_async", + "regionTag": "config_v1_generated_Config_DeleteDeployment_async", "segments": [ { "end": 55, @@ -597,7 +774,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_preview_async.py" + "title": "config_v1_generated_config_delete_deployment_async.py" }, { "canonical": true, @@ -606,19 +783,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.delete_preview", + "fullName": "google.cloud.config_v1.ConfigClient.delete_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.DeletePreview", + "fullName": 
"google.cloud.config.v1.Config.DeleteDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeletePreview" + "shortName": "DeleteDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeletePreviewRequest" + "type": "google.cloud.config_v1.types.DeleteDeploymentRequest" }, { "name": "name", @@ -638,13 +815,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "delete_preview" + "shortName": "delete_deployment" }, - "description": "Sample for DeletePreview", - "file": "config_v1_generated_config_delete_preview_sync.py", + "description": "Sample for DeleteDeployment", + "file": "config_v1_generated_config_delete_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeletePreview_sync", + "regionTag": "config_v1_generated_Config_DeleteDeployment_sync", "segments": [ { "end": 55, @@ -677,7 +854,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_preview_sync.py" + "title": "config_v1_generated_config_delete_deployment_sync.py" }, { "canonical": true, @@ -687,19 +864,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": "google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -718,21 +895,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": 
"delete_statefile" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_async.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_async", + "regionTag": "config_v1_generated_Config_DeletePreview_async", "segments": [ { - "end": 50, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 55, "start": 27, "type": "SHORT" }, @@ -742,20 +920,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 52, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_async.py" + "title": "config_v1_generated_config_delete_preview_async.py" }, { "canonical": true, @@ -764,19 +944,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.delete_preview", "method": { - "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "fullName": "google.cloud.config.v1.Config.DeletePreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "DeleteStatefile" + "shortName": "DeletePreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + "type": "google.cloud.config_v1.types.DeletePreviewRequest" }, { "name": "name", @@ -795,21 +975,958 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "shortName": "delete_statefile" + "resultType": "google.api_core.operation.Operation", + "shortName": 
"delete_preview" }, - "description": "Sample for DeleteStatefile", - "file": "config_v1_generated_config_delete_statefile_sync.py", + "description": "Sample for DeletePreview", + "file": "config_v1_generated_config_delete_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", + "regionTag": "config_v1_generated_Config_DeletePreview_sync", "segments": [ { - "end": 50, + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_preview_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.delete_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeleteStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_statefile" + }, + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeleteStatefile_async", + 
"segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.delete_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.DeleteStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeleteStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeleteStatefileRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "shortName": "delete_statefile" + }, + "description": "Sample for DeleteStatefile", + "file": "config_v1_generated_config_delete_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeleteStatefile_sync", + "segments": [ + { + "end": 50, + "start": 27, + "type": "FULL" + }, + { + "end": 50, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 51, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_delete_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + 
"async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.deprovision_deployment_group", + "method": { + "fullName": "google.cloud.config.v1.Config.DeprovisionDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeprovisionDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeprovisionDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "deprovision_deployment_group" + }, + "description": "Sample for DeprovisionDeploymentGroup", + "file": "config_v1_generated_config_deprovision_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeprovisionDeploymentGroup_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_deprovision_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.deprovision_deployment_group", + "method": { + "fullName": 
"google.cloud.config.v1.Config.DeprovisionDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "DeprovisionDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.DeprovisionDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "deprovision_deployment_group" + }, + "description": "Sample for DeprovisionDeploymentGroup", + "file": "config_v1_generated_config_deprovision_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_DeprovisionDeploymentGroup_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_deprovision_deployment_group_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportDeploymentStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": 
"export_deployment_statefile" + }, + "description": "Sample for ExportDeploymentStatefile", + "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_deployment_statefile_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + 
"end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportLockInfo" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.LockInfo", + "shortName": "export_lock_info" + }, + "description": "Sample for ExportLockInfo", + "file": "config_v1_generated_config_export_lock_info_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_lock_info_sync.py" + }, + { + "canonical": 
true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + "shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_preview_result_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_preview_result", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": 
"Config" + }, + "shortName": "ExportPreviewResult" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", + "shortName": "export_preview_result" + }, + "description": "Sample for ExportPreviewResult", + "file": "config_v1_generated_config_export_preview_result_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportPreviewResult_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_preview_result_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportRevisionStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" + }, + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_export_revision_statefile_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", + "method": { + "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ExportRevisionStatefile" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "export_revision_statefile" + }, + "description": "Sample for ExportRevisionStatefile", + "file": "config_v1_generated_config_export_revision_statefile_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": 
"config_v1_generated_Config_ExportRevisionStatefile_sync", + "segments": [ + { + "end": 51, "start": 27, "type": "FULL" }, { - "end": 50, + "end": 51, "start": 27, "type": "SHORT" }, @@ -819,20 +1936,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 51, + "end": 52, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_delete_statefile_sync.py" + "title": "config_v1_generated_config_export_revision_statefile_sync.py" }, { "canonical": true, @@ -842,19 +1961,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_auto_migration_config", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "GetAutoMigrationConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -869,14 +1992,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", + "shortName": "get_auto_migration_config" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_async.py", + "description": "Sample for GetAutoMigrationConfig", + "file": 
"config_v1_generated_config_get_auto_migration_config_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportDeploymentStatefile_async", + "regionTag": "config_v1_generated_Config_GetAutoMigrationConfig_async", "segments": [ { "end": 51, @@ -909,7 +2032,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_deployment_statefile_async.py" + "title": "config_v1_generated_config_get_auto_migration_config_async.py" }, { "canonical": true, @@ -918,19 +2041,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_deployment_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.get_auto_migration_config", "method": { - "fullName": "google.cloud.config.v1.Config.ExportDeploymentStatefile", + "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportDeploymentStatefile" + "shortName": "GetAutoMigrationConfig" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportDeploymentStatefileRequest" + "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -945,14 +2072,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_deployment_statefile" + "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", + "shortName": "get_auto_migration_config" }, - "description": "Sample for ExportDeploymentStatefile", - "file": "config_v1_generated_config_export_deployment_statefile_sync.py", + "description": "Sample for GetAutoMigrationConfig", + "file": "config_v1_generated_config_get_auto_migration_config_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"config_v1_generated_Config_ExportDeploymentStatefile_sync", + "regionTag": "config_v1_generated_Config_GetAutoMigrationConfig_sync", "segments": [ { "end": 51, @@ -985,7 +2112,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_deployment_statefile_sync.py" + "title": "config_v1_generated_config_get_auto_migration_config_sync.py" }, { "canonical": true, @@ -995,19 +2122,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_lock_info", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment_group_revision", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.GetDeploymentGroupRevision", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "GetDeploymentGroupRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + "type": "google.cloud.config_v1.types.GetDeploymentGroupRevisionRequest" }, { "name": "name", @@ -1026,14 +2153,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.DeploymentGroupRevision", + "shortName": "get_deployment_group_revision" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_async.py", + "description": "Sample for GetDeploymentGroupRevision", + "file": "config_v1_generated_config_get_deployment_group_revision_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_async", + "regionTag": "config_v1_generated_Config_GetDeploymentGroupRevision_async", "segments": [ { "end": 51, @@ -1066,7 +2193,7 @@ "type": "RESPONSE_HANDLING" } ], - 
"title": "config_v1_generated_config_export_lock_info_async.py" + "title": "config_v1_generated_config_get_deployment_group_revision_async.py" }, { "canonical": true, @@ -1075,19 +2202,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_lock_info", + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment_group_revision", "method": { - "fullName": "google.cloud.config.v1.Config.ExportLockInfo", + "fullName": "google.cloud.config.v1.Config.GetDeploymentGroupRevision", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportLockInfo" + "shortName": "GetDeploymentGroupRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportLockInfoRequest" + "type": "google.cloud.config_v1.types.GetDeploymentGroupRevisionRequest" }, { "name": "name", @@ -1106,14 +2233,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.LockInfo", - "shortName": "export_lock_info" + "resultType": "google.cloud.config_v1.types.DeploymentGroupRevision", + "shortName": "get_deployment_group_revision" }, - "description": "Sample for ExportLockInfo", - "file": "config_v1_generated_config_export_lock_info_sync.py", + "description": "Sample for GetDeploymentGroupRevision", + "file": "config_v1_generated_config_get_deployment_group_revision_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportLockInfo_sync", + "regionTag": "config_v1_generated_Config_GetDeploymentGroupRevision_sync", "segments": [ { "end": 51, @@ -1146,7 +2273,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_lock_info_sync.py" + "title": "config_v1_generated_config_get_deployment_group_revision_sync.py" }, { "canonical": true, @@ -1156,19 +2283,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", 
"shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_preview_result", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment_group", "method": { - "fullName": "google.cloud.config.v1.Config.ExportPreviewResult", + "fullName": "google.cloud.config.v1.Config.GetDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportPreviewResult" + "shortName": "GetDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + "type": "google.cloud.config_v1.types.GetDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1183,14 +2314,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", - "shortName": "export_preview_result" + "resultType": "google.cloud.config_v1.types.DeploymentGroup", + "shortName": "get_deployment_group" }, - "description": "Sample for ExportPreviewResult", - "file": "config_v1_generated_config_export_preview_result_async.py", + "description": "Sample for GetDeploymentGroup", + "file": "config_v1_generated_config_get_deployment_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportPreviewResult_async", + "regionTag": "config_v1_generated_Config_GetDeploymentGroup_async", "segments": [ { "end": 51, @@ -1223,7 +2354,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_preview_result_async.py" + "title": "config_v1_generated_config_get_deployment_group_async.py" }, { "canonical": true, @@ -1232,19 +2363,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_preview_result", + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment_group", "method": { - "fullName": 
"google.cloud.config.v1.Config.ExportPreviewResult", + "fullName": "google.cloud.config.v1.Config.GetDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportPreviewResult" + "shortName": "GetDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportPreviewResultRequest" + "type": "google.cloud.config_v1.types.GetDeploymentGroupRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1259,14 +2394,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ExportPreviewResultResponse", - "shortName": "export_preview_result" + "resultType": "google.cloud.config_v1.types.DeploymentGroup", + "shortName": "get_deployment_group" }, - "description": "Sample for ExportPreviewResult", - "file": "config_v1_generated_config_export_preview_result_sync.py", + "description": "Sample for GetDeploymentGroup", + "file": "config_v1_generated_config_get_deployment_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportPreviewResult_sync", + "regionTag": "config_v1_generated_Config_GetDeploymentGroup_sync", "segments": [ { "end": 51, @@ -1299,7 +2434,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_preview_result_sync.py" + "title": "config_v1_generated_config_get_deployment_group_sync.py" }, { "canonical": true, @@ -1309,19 +2444,23 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - 
"shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + "name": "name", + "type": "str" }, { "name": "retry", @@ -1336,14 +2475,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_async.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_async", + "regionTag": "config_v1_generated_Config_GetDeployment_async", "segments": [ { "end": 51, @@ -1376,7 +2515,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_async.py" + "title": "config_v1_generated_config_get_deployment_async.py" }, { "canonical": true, @@ -1385,19 +2524,23 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.export_revision_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ExportRevisionStatefile", + "fullName": "google.cloud.config.v1.Config.GetDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ExportRevisionStatefile" + "shortName": "GetDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ExportRevisionStatefileRequest" + "type": "google.cloud.config_v1.types.GetDeploymentRequest" + }, + { + 
"name": "name", + "type": "str" }, { "name": "retry", @@ -1412,14 +2555,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "export_revision_statefile" + "resultType": "google.cloud.config_v1.types.Deployment", + "shortName": "get_deployment" }, - "description": "Sample for ExportRevisionStatefile", - "file": "config_v1_generated_config_export_revision_statefile_sync.py", + "description": "Sample for GetDeployment", + "file": "config_v1_generated_config_get_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ExportRevisionStatefile_sync", + "regionTag": "config_v1_generated_Config_GetDeployment_sync", "segments": [ { "end": 51, @@ -1452,7 +2595,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_export_revision_statefile_sync.py" + "title": "config_v1_generated_config_get_deployment_sync.py" }, { "canonical": true, @@ -1462,19 +2605,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_auto_migration_config", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_preview", "method": { - "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetAutoMigrationConfig" + "shortName": "GetPreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1493,14 +2636,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", - "shortName": "get_auto_migration_config" + "resultType": "google.cloud.config_v1.types.Preview", + 
"shortName": "get_preview" }, - "description": "Sample for GetAutoMigrationConfig", - "file": "config_v1_generated_config_get_auto_migration_config_async.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetAutoMigrationConfig_async", + "regionTag": "config_v1_generated_Config_GetPreview_async", "segments": [ { "end": 51, @@ -1533,7 +2676,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_auto_migration_config_async.py" + "title": "config_v1_generated_config_get_preview_async.py" }, { "canonical": true, @@ -1542,19 +2685,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_auto_migration_config", + "fullName": "google.cloud.config_v1.ConfigClient.get_preview", "method": { - "fullName": "google.cloud.config.v1.Config.GetAutoMigrationConfig", + "fullName": "google.cloud.config.v1.Config.GetPreview", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetAutoMigrationConfig" + "shortName": "GetPreview" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetAutoMigrationConfigRequest" + "type": "google.cloud.config_v1.types.GetPreviewRequest" }, { "name": "name", @@ -1573,14 +2716,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.AutoMigrationConfig", - "shortName": "get_auto_migration_config" + "resultType": "google.cloud.config_v1.types.Preview", + "shortName": "get_preview" }, - "description": "Sample for GetAutoMigrationConfig", - "file": "config_v1_generated_config_get_auto_migration_config_sync.py", + "description": "Sample for GetPreview", + "file": "config_v1_generated_config_get_preview_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": 
"config_v1_generated_Config_GetAutoMigrationConfig_sync", + "regionTag": "config_v1_generated_Config_GetPreview_sync", "segments": [ { "end": 51, @@ -1613,7 +2756,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_auto_migration_config_sync.py" + "title": "config_v1_generated_config_get_preview_sync.py" }, { "canonical": true, @@ -1623,19 +2766,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource_change", "method": { - "fullName": "google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetResourceChange", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetResourceChange" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetResourceChangeRequest" }, { "name": "name", @@ -1654,14 +2797,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.ResourceChange", + "shortName": "get_resource_change" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_async.py", + "description": "Sample for GetResourceChange", + "file": "config_v1_generated_config_get_resource_change_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_async", + "regionTag": "config_v1_generated_Config_GetResourceChange_async", "segments": [ { "end": 51, @@ -1694,7 +2837,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_async.py" + "title": 
"config_v1_generated_config_get_resource_change_async.py" }, { "canonical": true, @@ -1703,19 +2846,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_deployment", + "fullName": "google.cloud.config_v1.ConfigClient.get_resource_change", "method": { - "fullName": "google.cloud.config.v1.Config.GetDeployment", + "fullName": "google.cloud.config.v1.Config.GetResourceChange", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetDeployment" + "shortName": "GetResourceChange" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetDeploymentRequest" + "type": "google.cloud.config_v1.types.GetResourceChangeRequest" }, { "name": "name", @@ -1734,14 +2877,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Deployment", - "shortName": "get_deployment" + "resultType": "google.cloud.config_v1.types.ResourceChange", + "shortName": "get_resource_change" }, - "description": "Sample for GetDeployment", - "file": "config_v1_generated_config_get_deployment_sync.py", + "description": "Sample for GetResourceChange", + "file": "config_v1_generated_config_get_resource_change_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetDeployment_sync", + "regionTag": "config_v1_generated_Config_GetResourceChange_sync", "segments": [ { "end": 51, @@ -1774,7 +2917,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_deployment_sync.py" + "title": "config_v1_generated_config_get_resource_change_sync.py" }, { "canonical": true, @@ -1784,19 +2927,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_preview", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource_drift", "method": { - 
"fullName": "google.cloud.config.v1.Config.GetPreview", + "fullName": "google.cloud.config.v1.Config.GetResourceDrift", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetPreview" + "shortName": "GetResourceDrift" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetPreviewRequest" + "type": "google.cloud.config_v1.types.GetResourceDriftRequest" }, { "name": "name", @@ -1815,14 +2958,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Preview", - "shortName": "get_preview" + "resultType": "google.cloud.config_v1.types.ResourceDrift", + "shortName": "get_resource_drift" }, - "description": "Sample for GetPreview", - "file": "config_v1_generated_config_get_preview_async.py", + "description": "Sample for GetResourceDrift", + "file": "config_v1_generated_config_get_resource_drift_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetPreview_async", + "regionTag": "config_v1_generated_Config_GetResourceDrift_async", "segments": [ { "end": 51, @@ -1855,7 +2998,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_preview_async.py" + "title": "config_v1_generated_config_get_resource_drift_async.py" }, { "canonical": true, @@ -1864,19 +3007,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_preview", + "fullName": "google.cloud.config_v1.ConfigClient.get_resource_drift", "method": { - "fullName": "google.cloud.config.v1.Config.GetPreview", + "fullName": "google.cloud.config.v1.Config.GetResourceDrift", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetPreview" + "shortName": "GetResourceDrift" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetPreviewRequest" + "type": 
"google.cloud.config_v1.types.GetResourceDriftRequest" }, { "name": "name", @@ -1895,14 +3038,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Preview", - "shortName": "get_preview" + "resultType": "google.cloud.config_v1.types.ResourceDrift", + "shortName": "get_resource_drift" }, - "description": "Sample for GetPreview", - "file": "config_v1_generated_config_get_preview_sync.py", + "description": "Sample for GetResourceDrift", + "file": "config_v1_generated_config_get_resource_drift_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetPreview_sync", + "regionTag": "config_v1_generated_Config_GetResourceDrift_sync", "segments": [ { "end": 51, @@ -1935,7 +3078,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_preview_sync.py" + "title": "config_v1_generated_config_get_resource_drift_sync.py" }, { "canonical": true, @@ -1945,19 +3088,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource_change", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource", "method": { - "fullName": "google.cloud.config.v1.Config.GetResourceChange", + "fullName": "google.cloud.config.v1.Config.GetResource", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetResourceChange" + "shortName": "GetResource" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceChangeRequest" + "type": "google.cloud.config_v1.types.GetResourceRequest" }, { "name": "name", @@ -1976,14 +3119,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ResourceChange", - "shortName": "get_resource_change" + "resultType": "google.cloud.config_v1.types.Resource", + "shortName": "get_resource" }, - "description": "Sample 
for GetResourceChange", - "file": "config_v1_generated_config_get_resource_change_async.py", + "description": "Sample for GetResource", + "file": "config_v1_generated_config_get_resource_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResourceChange_async", + "regionTag": "config_v1_generated_Config_GetResource_async", "segments": [ { "end": 51, @@ -2016,7 +3159,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_change_async.py" + "title": "config_v1_generated_config_get_resource_async.py" }, { "canonical": true, @@ -2025,19 +3168,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_resource_change", + "fullName": "google.cloud.config_v1.ConfigClient.get_resource", "method": { - "fullName": "google.cloud.config.v1.Config.GetResourceChange", + "fullName": "google.cloud.config.v1.Config.GetResource", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetResourceChange" + "shortName": "GetResource" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceChangeRequest" + "type": "google.cloud.config_v1.types.GetResourceRequest" }, { "name": "name", @@ -2056,14 +3199,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ResourceChange", - "shortName": "get_resource_change" + "resultType": "google.cloud.config_v1.types.Resource", + "shortName": "get_resource" }, - "description": "Sample for GetResourceChange", - "file": "config_v1_generated_config_get_resource_change_sync.py", + "description": "Sample for GetResource", + "file": "config_v1_generated_config_get_resource_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResourceChange_sync", + "regionTag": 
"config_v1_generated_Config_GetResource_sync", "segments": [ { "end": 51, @@ -2096,7 +3239,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_change_sync.py" + "title": "config_v1_generated_config_get_resource_sync.py" }, { "canonical": true, @@ -2106,19 +3249,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource_drift", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_revision", "method": { - "fullName": "google.cloud.config.v1.Config.GetResourceDrift", + "fullName": "google.cloud.config.v1.Config.GetRevision", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetResourceDrift" + "shortName": "GetRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceDriftRequest" + "type": "google.cloud.config_v1.types.GetRevisionRequest" }, { "name": "name", @@ -2137,14 +3280,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ResourceDrift", - "shortName": "get_resource_drift" + "resultType": "google.cloud.config_v1.types.Revision", + "shortName": "get_revision" }, - "description": "Sample for GetResourceDrift", - "file": "config_v1_generated_config_get_resource_drift_async.py", + "description": "Sample for GetRevision", + "file": "config_v1_generated_config_get_revision_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResourceDrift_async", + "regionTag": "config_v1_generated_Config_GetRevision_async", "segments": [ { "end": 51, @@ -2177,7 +3320,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_drift_async.py" + "title": "config_v1_generated_config_get_revision_async.py" }, { "canonical": true, @@ -2186,19 +3329,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", 
"shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_resource_drift", + "fullName": "google.cloud.config_v1.ConfigClient.get_revision", "method": { - "fullName": "google.cloud.config.v1.Config.GetResourceDrift", + "fullName": "google.cloud.config.v1.Config.GetRevision", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetResourceDrift" + "shortName": "GetRevision" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceDriftRequest" + "type": "google.cloud.config_v1.types.GetRevisionRequest" }, { "name": "name", @@ -2217,14 +3360,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.ResourceDrift", - "shortName": "get_resource_drift" + "resultType": "google.cloud.config_v1.types.Revision", + "shortName": "get_revision" }, - "description": "Sample for GetResourceDrift", - "file": "config_v1_generated_config_get_resource_drift_sync.py", + "description": "Sample for GetRevision", + "file": "config_v1_generated_config_get_revision_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResourceDrift_sync", + "regionTag": "config_v1_generated_Config_GetRevision_sync", "segments": [ { "end": 51, @@ -2257,7 +3400,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_drift_sync.py" + "title": "config_v1_generated_config_get_revision_sync.py" }, { "canonical": true, @@ -2267,19 +3410,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_resource", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_terraform_version", "method": { - "fullName": "google.cloud.config.v1.Config.GetResource", + "fullName": "google.cloud.config.v1.Config.GetTerraformVersion", "service": { "fullName": "google.cloud.config.v1.Config", 
"shortName": "Config" }, - "shortName": "GetResource" + "shortName": "GetTerraformVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceRequest" + "type": "google.cloud.config_v1.types.GetTerraformVersionRequest" }, { "name": "name", @@ -2298,14 +3441,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Resource", - "shortName": "get_resource" + "resultType": "google.cloud.config_v1.types.TerraformVersion", + "shortName": "get_terraform_version" }, - "description": "Sample for GetResource", - "file": "config_v1_generated_config_get_resource_async.py", + "description": "Sample for GetTerraformVersion", + "file": "config_v1_generated_config_get_terraform_version_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResource_async", + "regionTag": "config_v1_generated_Config_GetTerraformVersion_async", "segments": [ { "end": 51, @@ -2338,7 +3481,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_async.py" + "title": "config_v1_generated_config_get_terraform_version_async.py" }, { "canonical": true, @@ -2347,19 +3490,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_resource", + "fullName": "google.cloud.config_v1.ConfigClient.get_terraform_version", "method": { - "fullName": "google.cloud.config.v1.Config.GetResource", + "fullName": "google.cloud.config.v1.Config.GetTerraformVersion", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetResource" + "shortName": "GetTerraformVersion" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetResourceRequest" + "type": "google.cloud.config_v1.types.GetTerraformVersionRequest" }, { "name": "name", @@ -2378,14 +3521,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" 
} ], - "resultType": "google.cloud.config_v1.types.Resource", - "shortName": "get_resource" + "resultType": "google.cloud.config_v1.types.TerraformVersion", + "shortName": "get_terraform_version" }, - "description": "Sample for GetResource", - "file": "config_v1_generated_config_get_resource_sync.py", + "description": "Sample for GetTerraformVersion", + "file": "config_v1_generated_config_get_terraform_version_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetResource_sync", + "regionTag": "config_v1_generated_Config_GetTerraformVersion_sync", "segments": [ { "end": 51, @@ -2418,7 +3561,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_resource_sync.py" + "title": "config_v1_generated_config_get_terraform_version_sync.py" }, { "canonical": true, @@ -2428,24 +3571,28 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_revision", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.import_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.GetRevision", + "fullName": "google.cloud.config.v1.Config.ImportStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetRevision" + "shortName": "ImportStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetRevisionRequest" + "type": "google.cloud.config_v1.types.ImportStatefileRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "lock_id", + "type": "int" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2459,22 +3606,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Revision", - "shortName": "get_revision" + "resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "import_statefile" }, - "description": "Sample 
for GetRevision", - "file": "config_v1_generated_config_get_revision_async.py", + "description": "Sample for ImportStatefile", + "file": "config_v1_generated_config_import_statefile_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetRevision_async", + "regionTag": "config_v1_generated_Config_ImportStatefile_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2484,22 +3631,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_revision_async.py" + "title": "config_v1_generated_config_import_statefile_async.py" }, { "canonical": true, @@ -2508,24 +3655,28 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_revision", + "fullName": "google.cloud.config_v1.ConfigClient.import_statefile", "method": { - "fullName": "google.cloud.config.v1.Config.GetRevision", + "fullName": "google.cloud.config.v1.Config.ImportStatefile", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetRevision" + "shortName": "ImportStatefile" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetRevisionRequest" + "type": "google.cloud.config_v1.types.ImportStatefileRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, + { + "name": "lock_id", + "type": "int" + }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2539,22 +3690,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Revision", - "shortName": "get_revision" + 
"resultType": "google.cloud.config_v1.types.Statefile", + "shortName": "import_statefile" }, - "description": "Sample for GetRevision", - "file": "config_v1_generated_config_get_revision_sync.py", + "description": "Sample for ImportStatefile", + "file": "config_v1_generated_config_import_statefile_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetRevision_sync", + "regionTag": "config_v1_generated_Config_ImportStatefile_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2564,22 +3715,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 45, + "end": 46, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 48, - "start": 46, + "end": 49, + "start": 47, "type": "REQUEST_EXECUTION" }, { - "end": 52, - "start": 49, + "end": 53, + "start": 50, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_revision_sync.py" + "title": "config_v1_generated_config_import_statefile_sync.py" }, { "canonical": true, @@ -2589,22 +3740,22 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.get_terraform_version", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_deployment_group_revisions", "method": { - "fullName": "google.cloud.config.v1.Config.GetTerraformVersion", + "fullName": "google.cloud.config.v1.Config.ListDeploymentGroupRevisions", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetTerraformVersion" + "shortName": "ListDeploymentGroupRevisions" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetTerraformVersionRequest" + "type": "google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest" }, { - "name": "name", + "name": "parent", "type": "str" }, { @@ -2620,22 +3771,22 @@ "type": "Sequence[Tuple[str, 
Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.TerraformVersion", - "shortName": "get_terraform_version" + "resultType": "google.cloud.config_v1.services.config.pagers.ListDeploymentGroupRevisionsAsyncPager", + "shortName": "list_deployment_group_revisions" }, - "description": "Sample for GetTerraformVersion", - "file": "config_v1_generated_config_get_terraform_version_async.py", + "description": "Sample for ListDeploymentGroupRevisions", + "file": "config_v1_generated_config_list_deployment_group_revisions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetTerraformVersion_async", + "regionTag": "config_v1_generated_Config_ListDeploymentGroupRevisions_async", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2655,12 +3806,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_terraform_version_async.py" + "title": "config_v1_generated_config_list_deployment_group_revisions_async.py" }, { "canonical": true, @@ -2669,22 +3820,22 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.get_terraform_version", + "fullName": "google.cloud.config_v1.ConfigClient.list_deployment_group_revisions", "method": { - "fullName": "google.cloud.config.v1.Config.GetTerraformVersion", + "fullName": "google.cloud.config.v1.Config.ListDeploymentGroupRevisions", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "GetTerraformVersion" + "shortName": "ListDeploymentGroupRevisions" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.GetTerraformVersionRequest" + "type": "google.cloud.config_v1.types.ListDeploymentGroupRevisionsRequest" }, { - "name": "name", + "name": 
"parent", "type": "str" }, { @@ -2700,22 +3851,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.TerraformVersion", - "shortName": "get_terraform_version" + "resultType": "google.cloud.config_v1.services.config.pagers.ListDeploymentGroupRevisionsPager", + "shortName": "list_deployment_group_revisions" }, - "description": "Sample for GetTerraformVersion", - "file": "config_v1_generated_config_get_terraform_version_sync.py", + "description": "Sample for ListDeploymentGroupRevisions", + "file": "config_v1_generated_config_list_deployment_group_revisions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_GetTerraformVersion_sync", + "regionTag": "config_v1_generated_Config_ListDeploymentGroupRevisions_sync", "segments": [ { - "end": 51, + "end": 52, "start": 27, "type": "FULL" }, { - "end": 51, + "end": 52, "start": 27, "type": "SHORT" }, @@ -2735,12 +3886,12 @@ "type": "REQUEST_EXECUTION" }, { - "end": 52, + "end": 53, "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_get_terraform_version_sync.py" + "title": "config_v1_generated_config_list_deployment_group_revisions_sync.py" }, { "canonical": true, @@ -2750,28 +3901,24 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.import_statefile", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_deployment_groups", "method": { - "fullName": "google.cloud.config.v1.Config.ImportStatefile", + "fullName": "google.cloud.config.v1.Config.ListDeploymentGroups", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ImportStatefile" + "shortName": "ListDeploymentGroups" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ImportStatefileRequest" + "type": 
"google.cloud.config_v1.types.ListDeploymentGroupsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "lock_id", - "type": "int" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2785,14 +3932,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "import_statefile" + "resultType": "google.cloud.config_v1.services.config.pagers.ListDeploymentGroupsAsyncPager", + "shortName": "list_deployment_groups" }, - "description": "Sample for ImportStatefile", - "file": "config_v1_generated_config_import_statefile_async.py", + "description": "Sample for ListDeploymentGroups", + "file": "config_v1_generated_config_list_deployment_groups_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ImportStatefile_async", + "regionTag": "config_v1_generated_Config_ListDeploymentGroups_async", "segments": [ { "end": 52, @@ -2810,22 +3957,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_import_statefile_async.py" + "title": "config_v1_generated_config_list_deployment_groups_async.py" }, { "canonical": true, @@ -2834,28 +3981,24 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.import_statefile", + "fullName": "google.cloud.config_v1.ConfigClient.list_deployment_groups", "method": { - "fullName": "google.cloud.config.v1.Config.ImportStatefile", + "fullName": "google.cloud.config.v1.Config.ListDeploymentGroups", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ImportStatefile" + "shortName": "ListDeploymentGroups" }, "parameters": [ { 
"name": "request", - "type": "google.cloud.config_v1.types.ImportStatefileRequest" + "type": "google.cloud.config_v1.types.ListDeploymentGroupsRequest" }, { "name": "parent", "type": "str" }, - { - "name": "lock_id", - "type": "int" - }, { "name": "retry", "type": "google.api_core.retry.Retry" @@ -2869,14 +4012,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.types.Statefile", - "shortName": "import_statefile" + "resultType": "google.cloud.config_v1.services.config.pagers.ListDeploymentGroupsPager", + "shortName": "list_deployment_groups" }, - "description": "Sample for ImportStatefile", - "file": "config_v1_generated_config_import_statefile_sync.py", + "description": "Sample for ListDeploymentGroups", + "file": "config_v1_generated_config_list_deployment_groups_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ImportStatefile_sync", + "regionTag": "config_v1_generated_Config_ListDeploymentGroups_sync", "segments": [ { "end": 52, @@ -2894,22 +4037,22 @@ "type": "CLIENT_INITIALIZATION" }, { - "end": 46, + "end": 45, "start": 41, "type": "REQUEST_INITIALIZATION" }, { - "end": 49, - "start": 47, + "end": 48, + "start": 46, "type": "REQUEST_EXECUTION" }, { "end": 53, - "start": 50, + "start": 49, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_import_statefile_sync.py" + "title": "config_v1_generated_config_list_deployment_groups_sync.py" }, { "canonical": true, @@ -3762,7 +4905,168 @@ "file": "config_v1_generated_config_list_revisions_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ListRevisions_async", + "regionTag": "config_v1_generated_Config_ListRevisions_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 
41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_revisions_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.list_revisions", + "method": { + "fullName": "google.cloud.config.v1.Config.ListRevisions", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListRevisions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListRevisionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListRevisionsPager", + "shortName": "list_revisions" + }, + "description": "Sample for ListRevisions", + "file": "config_v1_generated_config_list_revisions_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListRevisions_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_list_revisions_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": 
"ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_terraform_versions", + "method": { + "fullName": "google.cloud.config.v1.Config.ListTerraformVersions", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "ListTerraformVersions" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.ListTerraformVersionsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.config_v1.services.config.pagers.ListTerraformVersionsAsyncPager", + "shortName": "list_terraform_versions" + }, + "description": "Sample for ListTerraformVersions", + "file": "config_v1_generated_config_list_terraform_versions_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_ListTerraformVersions_async", "segments": [ { "end": 52, @@ -3795,7 +5099,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_list_revisions_async.py" + "title": "config_v1_generated_config_list_terraform_versions_async.py" }, { "canonical": true, @@ -3804,19 +5108,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.list_revisions", + "fullName": "google.cloud.config_v1.ConfigClient.list_terraform_versions", "method": { - "fullName": "google.cloud.config.v1.Config.ListRevisions", + "fullName": "google.cloud.config.v1.Config.ListTerraformVersions", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ListRevisions" + "shortName": "ListTerraformVersions" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ListRevisionsRequest" + "type": 
"google.cloud.config_v1.types.ListTerraformVersionsRequest" }, { "name": "parent", @@ -3835,14 +5139,14 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.services.config.pagers.ListRevisionsPager", - "shortName": "list_revisions" + "resultType": "google.cloud.config_v1.services.config.pagers.ListTerraformVersionsPager", + "shortName": "list_terraform_versions" }, - "description": "Sample for ListRevisions", - "file": "config_v1_generated_config_list_revisions_sync.py", + "description": "Sample for ListTerraformVersions", + "file": "config_v1_generated_config_list_terraform_versions_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ListRevisions_sync", + "regionTag": "config_v1_generated_Config_ListTerraformVersions_sync", "segments": [ { "end": 52, @@ -3875,7 +5179,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_list_revisions_sync.py" + "title": "config_v1_generated_config_list_terraform_versions_sync.py" }, { "canonical": true, @@ -3885,22 +5189,22 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.list_terraform_versions", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.lock_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ListTerraformVersions", + "fullName": "google.cloud.config.v1.Config.LockDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ListTerraformVersions" + "shortName": "LockDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ListTerraformVersionsRequest" + "type": "google.cloud.config_v1.types.LockDeploymentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3916,22 +5220,22 @@ "type": "Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": 
"google.cloud.config_v1.services.config.pagers.ListTerraformVersionsAsyncPager", - "shortName": "list_terraform_versions" + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "lock_deployment" }, - "description": "Sample for ListTerraformVersions", - "file": "config_v1_generated_config_list_terraform_versions_async.py", + "description": "Sample for LockDeployment", + "file": "config_v1_generated_config_lock_deployment_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ListTerraformVersions_async", + "regionTag": "config_v1_generated_Config_LockDeployment_async", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -3946,17 +5250,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_list_terraform_versions_async.py" + "title": "config_v1_generated_config_lock_deployment_async.py" }, { "canonical": true, @@ -3965,22 +5269,22 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.list_terraform_versions", + "fullName": "google.cloud.config_v1.ConfigClient.lock_deployment", "method": { - "fullName": "google.cloud.config.v1.Config.ListTerraformVersions", + "fullName": "google.cloud.config.v1.Config.LockDeployment", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "ListTerraformVersions" + "shortName": "LockDeployment" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.ListTerraformVersionsRequest" + "type": "google.cloud.config_v1.types.LockDeploymentRequest" }, { - "name": "parent", + "name": "name", "type": "str" }, { @@ -3996,22 +5300,22 @@ "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" } ], - "resultType": "google.cloud.config_v1.services.config.pagers.ListTerraformVersionsPager", - "shortName": "list_terraform_versions" + "resultType": "google.api_core.operation.Operation", + "shortName": "lock_deployment" }, - "description": "Sample for ListTerraformVersions", - "file": "config_v1_generated_config_list_terraform_versions_sync.py", + "description": "Sample for LockDeployment", + "file": "config_v1_generated_config_lock_deployment_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_ListTerraformVersions_sync", + "regionTag": "config_v1_generated_Config_LockDeployment_sync", "segments": [ { - "end": 52, + "end": 55, "start": 27, "type": "FULL" }, { - "end": 52, + "end": 55, "start": 27, "type": "SHORT" }, @@ -4026,17 +5330,17 @@ "type": "REQUEST_INITIALIZATION" }, { - "end": 48, + "end": 52, "start": 46, "type": "REQUEST_EXECUTION" }, { - "end": 53, - "start": 49, + "end": 56, + "start": 53, "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_list_terraform_versions_sync.py" + "title": "config_v1_generated_config_lock_deployment_sync.py" }, { "canonical": true, @@ -4046,19 +5350,19 @@ "fullName": "google.cloud.config_v1.ConfigAsyncClient", "shortName": "ConfigAsyncClient" }, - "fullName": "google.cloud.config_v1.ConfigAsyncClient.lock_deployment", + "fullName": "google.cloud.config_v1.ConfigAsyncClient.provision_deployment_group", "method": { - "fullName": "google.cloud.config.v1.Config.LockDeployment", + "fullName": "google.cloud.config.v1.Config.ProvisionDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "LockDeployment" + "shortName": "ProvisionDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.LockDeploymentRequest" + "type": "google.cloud.config_v1.types.ProvisionDeploymentGroupRequest" }, { "name": "name", @@ -4078,13 
+5382,13 @@ } ], "resultType": "google.api_core.operation_async.AsyncOperation", - "shortName": "lock_deployment" + "shortName": "provision_deployment_group" }, - "description": "Sample for LockDeployment", - "file": "config_v1_generated_config_lock_deployment_async.py", + "description": "Sample for ProvisionDeploymentGroup", + "file": "config_v1_generated_config_provision_deployment_group_async.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_LockDeployment_async", + "regionTag": "config_v1_generated_Config_ProvisionDeploymentGroup_async", "segments": [ { "end": 55, @@ -4117,7 +5421,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_lock_deployment_async.py" + "title": "config_v1_generated_config_provision_deployment_group_async.py" }, { "canonical": true, @@ -4126,19 +5430,19 @@ "fullName": "google.cloud.config_v1.ConfigClient", "shortName": "ConfigClient" }, - "fullName": "google.cloud.config_v1.ConfigClient.lock_deployment", + "fullName": "google.cloud.config_v1.ConfigClient.provision_deployment_group", "method": { - "fullName": "google.cloud.config.v1.Config.LockDeployment", + "fullName": "google.cloud.config.v1.Config.ProvisionDeploymentGroup", "service": { "fullName": "google.cloud.config.v1.Config", "shortName": "Config" }, - "shortName": "LockDeployment" + "shortName": "ProvisionDeploymentGroup" }, "parameters": [ { "name": "request", - "type": "google.cloud.config_v1.types.LockDeploymentRequest" + "type": "google.cloud.config_v1.types.ProvisionDeploymentGroupRequest" }, { "name": "name", @@ -4158,13 +5462,13 @@ } ], "resultType": "google.api_core.operation.Operation", - "shortName": "lock_deployment" + "shortName": "provision_deployment_group" }, - "description": "Sample for LockDeployment", - "file": "config_v1_generated_config_lock_deployment_sync.py", + "description": "Sample for ProvisionDeploymentGroup", + "file": 
"config_v1_generated_config_provision_deployment_group_sync.py", "language": "PYTHON", "origin": "API_DEFINITION", - "regionTag": "config_v1_generated_Config_LockDeployment_sync", + "regionTag": "config_v1_generated_Config_ProvisionDeploymentGroup_sync", "segments": [ { "end": 55, @@ -4197,7 +5501,7 @@ "type": "RESPONSE_HANDLING" } ], - "title": "config_v1_generated_config_lock_deployment_sync.py" + "title": "config_v1_generated_config_provision_deployment_group_sync.py" }, { "canonical": true, @@ -4537,6 +5841,175 @@ ], "title": "config_v1_generated_config_update_auto_migration_config_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.config_v1.ConfigAsyncClient", + "shortName": "ConfigAsyncClient" + }, + "fullName": "google.cloud.config_v1.ConfigAsyncClient.update_deployment_group", + "method": { + "fullName": "google.cloud.config.v1.Config.UpdateDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UpdateDeploymentGroupRequest" + }, + { + "name": "deployment_group", + "type": "google.cloud.config_v1.types.DeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_deployment_group" + }, + "description": "Sample for UpdateDeploymentGroup", + "file": "config_v1_generated_config_update_deployment_group_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateDeploymentGroup_async", + "segments": [ + { + "end": 54, + "start": 27, + 
"type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_deployment_group_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.config_v1.ConfigClient", + "shortName": "ConfigClient" + }, + "fullName": "google.cloud.config_v1.ConfigClient.update_deployment_group", + "method": { + "fullName": "google.cloud.config.v1.Config.UpdateDeploymentGroup", + "service": { + "fullName": "google.cloud.config.v1.Config", + "shortName": "Config" + }, + "shortName": "UpdateDeploymentGroup" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.config_v1.types.UpdateDeploymentGroupRequest" + }, + { + "name": "deployment_group", + "type": "google.cloud.config_v1.types.DeploymentGroup" + }, + { + "name": "update_mask", + "type": "google.protobuf.field_mask_pb2.FieldMask" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_deployment_group" + }, + "description": "Sample for UpdateDeploymentGroup", + "file": "config_v1_generated_config_update_deployment_group_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "config_v1_generated_Config_UpdateDeploymentGroup_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "config_v1_generated_config_update_deployment_group_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py index 0130d81d8f67..c08376af2c14 100644 --- a/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py +++ b/packages/google-cloud-config/tests/unit/gapic/config_v1/test_config.py @@ -12424,13 +12424,95 @@ async def test_update_auto_migration_config_flattened_error_async(): ) -def test_list_deployments_rest_use_cached_wrapped_rpc(): +@pytest.mark.parametrize( + "request_type", + [ + config.GetDeploymentGroupRequest, + dict, + ], +) +def test_get_deployment_group(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroup( + name="name_value", + state=config.DeploymentGroup.State.CREATING, + state_description="state_description_value", + provisioning_state=config.DeploymentGroup.ProvisioningState.PROVISIONING, + provisioning_state_description="provisioning_state_description_value", + ) + response = client.get_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.GetDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, config.DeploymentGroup) + assert response.name == "name_value" + assert response.state == config.DeploymentGroup.State.CREATING + assert response.state_description == "state_description_value" + assert ( + response.provisioning_state + == config.DeploymentGroup.ProvisioningState.PROVISIONING + ) + assert ( + response.provisioning_state_description + == "provisioning_state_description_value" + ) + + +def test_get_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.GetDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetDeploymentGroupRequest( + name="name_value", + ) + + +def test_get_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12438,259 +12520,353 @@ def test_list_deployments_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_deployments in client._transport._wrapped_methods + assert ( + client._transport.get_deployment_group in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_deployments] = ( + client._transport._wrapped_methods[client._transport.get_deployment_group] = ( mock_rpc ) - request = {} - client.list_deployments(request) + client.get_deployment_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_deployments(request) + client.get_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_deployments_rest_required_fields( - request_type=config.ListDeploymentsRequest, +@pytest.mark.asyncio +async def test_get_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ConfigRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.get_deployment_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_deployment_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.get_deployment_group(request) - jsonified_request["parent"] = "parent_value" + # Establish that the underlying gRPC stub method 
was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_deployments._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", + await client.get_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_deployment_group_async( + transport: str = "grpc_asyncio", request_type=config.GetDeploymentGroupRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroup( + name="name_value", + state=config.DeploymentGroup.State.CREATING, + state_description="state_description_value", + provisioning_state=config.DeploymentGroup.ProvisioningState.PROVISIONING, + provisioning_state_description="provisioning_state_description_value", + ) ) + response = await client.get_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.GetDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.DeploymentGroup) + assert response.name == "name_value" + assert response.state == config.DeploymentGroup.State.CREATING + assert response.state_description == "state_description_value" + assert ( + response.provisioning_state + == config.DeploymentGroup.ProvisioningState.PROVISIONING + ) + assert ( + response.provisioning_state_description + == "provisioning_state_description_value" ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" +@pytest.mark.asyncio +async def test_get_deployment_group_async_from_dict(): + await test_get_deployment_group_async(request_type=dict) + + +def test_get_deployment_group_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.GetDeploymentGroupRequest() - # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + call.return_value = config.DeploymentGroup() + client.get_deployment_group(request) - response = client.list_deployments(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_deployments_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_deployments._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroup() ) - & set(("parent",)) - ) + await client.get_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_deployments_rest_flattened(): +def test_get_deployment_group_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.ListDeploymentsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroup() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_deployment_group( + name="name_value", + ) - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_deployment_group_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_deployment_group( + config.GetDeploymentGroupRequest(), + name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_deployments(**mock_args) +@pytest.mark.asyncio +async def test_get_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroup() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroup() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deployment_group( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_deployments_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_deployments( - config.ListDeploymentsRequest(), - parent="parent_value", + await client.get_deployment_group( + config.GetDeploymentGroupRequest(), + name="name_value", ) -def test_list_deployments_rest_pager(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentGroupRequest, + dict, + ], +) +def test_create_deployment_group(request_type, transport: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - config.Deployment(), - ], - next_page_token="abc", - ), - config.ListDeploymentsResponse( - deployments=[], - next_page_token="def", - ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - ], - next_page_token="ghi", - ), - config.ListDeploymentsResponse( - deployments=[ - config.Deployment(), - config.Deployment(), - ], - ), - ) - # Two responses for two calls - response = response + response + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - # Wrap the values into proper Response objs - response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_deployment_group(request) - sample_request = {"parent": "projects/sample1/locations/sample2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.CreateDeploymentGroupRequest() + assert args[0] == request - pager = client.list_deployments(request=sample_request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Deployment) for i in results) - pages = list(client.list_deployments(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +def test_create_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.CreateDeploymentGroupRequest( + parent="parent_value", + deployment_group_id="deployment_group_id_value", + ) -def test_get_deployment_rest_use_cached_wrapped_rpc(): +def test_create_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12698,177 +12874,361 @@ def test_get_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_deployment in client._transport._wrapped_methods + assert ( + client._transport.create_deployment_group + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.create_deployment_group + ] = mock_rpc request = {} - client.get_deployment(request) + client.create_deployment_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_deployment(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): - transport_class = transports.ConfigRestTransport +@pytest.mark.asyncio +async def test_create_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.create_deployment_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_deployment_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.create_deployment_group(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC 
stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + await client.create_deployment_group(request) - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_deployment_group_async( + transport: str = "grpc_asyncio", request_type=config.CreateDeploymentGroupRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = config.Deployment() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_deployment_group(request) - # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.CreateDeploymentGroupRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.get_deployment(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_create_deployment_group_async_from_dict(): + await test_create_deployment_group_async(request_type=dict) -def test_get_deployment_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_create_deployment_group_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_deployment._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.CreateDeploymentGroupRequest() + + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deployment_group(request) -def test_get_deployment_rest_flattened(): + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.CreateDeploymentGroupRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_deployment_group_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.Deployment() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_deployment_group( + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment_group + mock_val = config.DeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].deployment_group_id + mock_val = "deployment_group_id_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + +def test_create_deployment_group_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_deployment_group( + config.CreateDeploymentGroupRequest(), + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_deployment(**mock_args) +@pytest.mark.asyncio +async def test_create_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_deployment_group( + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].deployment_group + mock_val = config.DeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].deployment_group_id + mock_val = "deployment_group_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_deployment_group( + config.CreateDeploymentGroupRequest(), + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", ) -def test_get_deployment_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentGroupRequest, + dict, + ], +) +def test_update_deployment_group(request_type, transport: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_deployment( - config.GetDeploymentRequest(), - name="name_value", + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.UpdateDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. 
+ client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.UpdateDeploymentGroupRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client.update_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.UpdateDeploymentGroupRequest() -def test_create_deployment_rest_use_cached_wrapped_rpc(): +def test_update_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -12876,225 +13236,355 @@ def test_create_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_deployment in client._transport._wrapped_methods + assert ( + client._transport.update_deployment_group + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_deployment] = ( - mock_rpc + client._transport._wrapped_methods[ + client._transport.update_deployment_group + ] = mock_rpc + request = {} + client.update_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_deployment_group + in client._client._transport._wrapped_methods ) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_deployment_group + ] = mock_rpc + request = {} - client.create_deployment(request) + await client.update_deployment_group(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_deployment(request) + await client.update_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_deployment_rest_required_fields( - request_type=config.CreateDeploymentRequest, +@pytest.mark.asyncio +async def test_update_deployment_group_async( + transport: str = "grpc_asyncio", request_type=config.UpdateDeploymentGroupRequest ): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request_init["deployment_id"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped - assert "deploymentId" not in jsonified_request + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_deployment_group(request) - # verify required fields with default values are now present - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == request_init["deployment_id"] + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.UpdateDeploymentGroupRequest() + assert args[0] == request - jsonified_request["parent"] = "parent_value" - jsonified_request["deploymentId"] = "deployment_id_value" + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).create_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "deployment_id", - "request_id", - ) - ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "deploymentId" in jsonified_request - assert jsonified_request["deploymentId"] == "deployment_id_value" +@pytest.mark.asyncio +async def test_update_deployment_group_async_from_dict(): + await test_update_deployment_group_async(request_type=dict) + +def test_update_deployment_group_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateDeploymentGroupRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.deployment_group.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deployment_group(request) - response = client.create_deployment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [ - ( - "deploymentId", - "", - ), - ("$alt", "json;enum-encoding=int"), - ] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment_group.name=name_value", + ) in kw["metadata"] -def test_create_deployment_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_update_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.create_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deploymentId", - "requestId", - ) - ) - & set( - ( - "parent", - "deploymentId", - "deployment", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.UpdateDeploymentGroupRequest() + + request.deployment_group.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - ) + await client.update_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "deployment_group.name=name_value", + ) in kw["metadata"] -def test_create_deployment_rest_flattened(): +def test_update_deployment_group_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.update_deployment_group( + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.create_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/deployments" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].deployment_group + mock_val = config.DeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val -def test_create_deployment_rest_flattened_error(transport: str = "rest"): +def test_update_deployment_group_flattened_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.create_deployment( - config.CreateDeploymentRequest(), - parent="parent_value", - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - deployment_id="deployment_id_value", + client.update_deployment_group( + config.UpdateDeploymentGroupRequest(), + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_update_deployment_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_update_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.update_deployment_group( + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].deployment_group + mock_val = config.DeploymentGroup(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_update_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.update_deployment_group( + config.UpdateDeploymentGroupRequest(), + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentGroupRequest, + dict, + ], +) +def test_delete_deployment_group(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.DeleteDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.DeleteDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.delete_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeleteDeploymentGroupRequest( + name="name_value", + ) + + +def test_delete_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13102,203 +13592,356 @@ def test_update_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.update_deployment in client._transport._wrapped_methods + assert ( + client._transport.delete_deployment_group + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.update_deployment] = ( - mock_rpc - ) - + client._transport._wrapped_methods[ + client._transport.delete_deployment_group + ] = mock_rpc request = {} - client.update_deployment(request) + client.delete_deployment_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_deployment(request) + client.delete_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_deployment_rest_required_fields( - request_type=config.UpdateDeploymentRequest, +@pytest.mark.asyncio +async def test_delete_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ConfigRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.delete_deployment_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_deployment_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.delete_deployment_group(request) - unset_fields = transport_class( - 
credentials=ga_credentials.AnonymousCredentials() - ).update_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "request_id", - "update_mask", - ) - ) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.delete_deployment_group(request) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "patch", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +@pytest.mark.asyncio +async def test_delete_deployment_group_async( + transport: str = "grpc_asyncio", request_type=config.DeleteDeploymentGroupRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response = client.update_deployment(request) + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_deployment_group(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.DeleteDeploymentGroupRequest() + assert args[0] == request -def test_update_deployment_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) - unset_fields = transport.update_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "requestId", - "updateMask", - ) - ) - & set(("deployment",)) - ) + +@pytest.mark.asyncio +async def test_delete_deployment_group_async_from_dict(): + await test_delete_deployment_group_async(request_type=dict) -def test_update_deployment_rest_flattened(): +def test_delete_deployment_group_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteDeploymentGroupRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "deployment": { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - } + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeleteDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") ) - mock_args.update(sample_request) + await client.delete_deployment_group(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.update_deployment(**mock_args) + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_deployment_group_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_deployment_group( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_update_deployment_rest_flattened_error(transport: str = "rest"): +def test_delete_deployment_group_flattened_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.update_deployment( - config.UpdateDeploymentRequest(), - deployment=config.Deployment( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.delete_deployment_group( + config.DeleteDeploymentGroupRequest(), + name="name_value", ) -def test_delete_deployment_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_delete_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_deployment_group( + config.DeleteDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListDeploymentGroupsRequest, + dict, + ], +) +def test_list_deployment_groups(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.ListDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployment_groups_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. 
+ request = config.ListDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.list_deployment_groups(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListDeploymentGroupsRequest( + parent="parent_value", + page_token="page_token_value", + filter="filter_value", + order_by="order_by_value", + ) + + +def test_list_deployment_groups_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13306,457 +13949,542 @@ def test_delete_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_deployment in client._transport._wrapped_methods + assert ( + client._transport.list_deployment_groups + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_deployment] = ( + client._transport._wrapped_methods[client._transport.list_deployment_groups] = ( mock_rpc ) - request = {} - client.delete_deployment(request) + client.list_deployment_groups(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_deployment(request) + client.list_deployment_groups(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_deployment_rest_required_fields( - request_type=config.DeleteDeploymentRequest, +@pytest.mark.asyncio +async def test_list_deployment_groups_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ConfigRestTransport + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.list_deployment_groups + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_deployment_groups + ] = mock_rpc - # verify required fields with default values are now present + 
request = {} + await client.list_deployment_groups(request) - jsonified_request["name"] = "name_value" + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).delete_deployment._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "delete_policy", - "force", - "request_id", - ) + await client.list_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_deployment_groups_async( + transport: str = "grpc_asyncio", request_type=config.ListDeploymentGroupsRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.ListDeploymentGroupsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentGroupsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.asyncio +async def test_list_deployment_groups_async_from_dict(): + await test_list_deployment_groups_async(request_type=dict) + +def test_list_deployment_groups_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "delete", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.ListDeploymentGroupsRequest() - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + request.parent = "parent_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + call.return_value = config.ListDeploymentGroupsResponse() + client.list_deployment_groups(request) - response = client.delete_deployment(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_delete_deployment_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_list_deployment_groups_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.delete_deployment._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "deletePolicy", - "force", - "requestId", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListDeploymentGroupsRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupsResponse() ) - & set(("name",)) - ) + await client.list_deployment_groups(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_delete_deployment_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_deployment_groups_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") - - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - name="name_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentGroupsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_deployment_groups( + parent="parent_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val -def test_delete_deployment_rest_flattened_error(transport: str = "rest"): +def test_list_deployment_groups_flattened_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_deployment( - config.DeleteDeploymentRequest(), - name="name_value", + client.list_deployment_groups( + config.ListDeploymentGroupsRequest(), + parent="parent_value", ) -def test_list_revisions_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() +@pytest.mark.asyncio +async def test_list_deployment_groups_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Ensure method has been cached - assert client._transport.list_revisions in client._transport._wrapped_methods + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentGroupsResponse() - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deployment_groups( + parent="parent_value", ) - client._transport._wrapped_methods[client._transport.list_revisions] = mock_rpc - request = {} - client.list_revisions(request) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - client.list_revisions(request) +@pytest.mark.asyncio +async def test_list_deployment_groups_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deployment_groups( + config.ListDeploymentGroupsRequest(), + parent="parent_value", + ) -def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_revisions._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - +def test_list_deployment_groups_pager(transport_name: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.list_revisions(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_list_revisions_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials + transport=transport_name, ) - unset_fields = transport.list_revisions._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[], + next_page_token="def", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + ), + RuntimeError, ) - & set(("parent",)) - ) - - -def test_list_revisions_rest_flattened(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + pager = client.list_deployment_groups(request={}, retry=retry, timeout=timeout) - 
client.list_revisions(**mock_args) + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" - % client.transport._host, - args[1], - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.DeploymentGroup) for i in results) -def test_list_revisions_rest_flattened_error(transport: str = "rest"): +def test_list_deployment_groups_pages(transport_name: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + transport=transport_name, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_revisions( - config.ListRevisionsRequest(), - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[], + next_page_token="def", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + ), + RuntimeError, ) + pages = list(client.list_deployment_groups(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_list_revisions_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_list_deployment_groups_async_pager(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), - config.Revision(), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + config.DeploymentGroup(), ], next_page_token="abc", ), - config.ListRevisionsResponse( - revisions=[], + config.ListDeploymentGroupsResponse( + deployment_groups=[], next_page_token="def", ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), ], next_page_token="ghi", ), - config.ListRevisionsResponse( - revisions=[ - config.Revision(), - config.Revision(), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), ], ), + RuntimeError, ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - pager = client.list_revisions(request=sample_request) + async_pager = await client.list_deployment_groups( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Revision) for i in results) + assert len(responses) == 6 + assert all(isinstance(i, config.DeploymentGroup) for i in responses) - pages = list(client.list_revisions(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +@pytest.mark.asyncio +async def 
test_list_deployment_groups_async_pages(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) -def test_get_revision_rest_use_cached_wrapped_rpc(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[], + next_page_token="def", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deployment_groups(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + config.ProvisionDeploymentGroupRequest, + dict, + ], +) +def test_provision_deployment_group(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.provision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.ProvisionDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_provision_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.provision_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ProvisionDeploymentGroupRequest( + name="name_value", + ) + + +def test_provision_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13764,177 +14492,345 @@ def test_get_revision_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_revision in client._transport._wrapped_methods + assert ( + client._transport.provision_deployment_group + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_revision] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.provision_deployment_group + ] = mock_rpc request = {} - client.get_revision(request) + client.provision_deployment_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_revision(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.provision_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) +@pytest.mark.asyncio +async def test_provision_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.provision_deployment_group + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.provision_deployment_group + ] = mock_rpc - jsonified_request["name"] = "name_value" + request = {} + await client.provision_deployment_group(request) - unset_fields = 
transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_revision._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + await client.provision_deployment_group(request) - # Designate an appropriate value for the returned response. - return_value = config.Revision() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) +@pytest.mark.asyncio +async def test_provision_deployment_group_async( + transport: str = "grpc_asyncio", request_type=config.ProvisionDeploymentGroupRequest +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response = client.get_revision(request) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.provision_deployment_group(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.ProvisionDeploymentGroupRequest() + assert args[0] == request + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) -def test_get_revision_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - unset_fields = transport.get_revision._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) +@pytest.mark.asyncio +async def test_provision_deployment_group_async_from_dict(): + await test_provision_deployment_group_async(request_type=dict) -def test_get_revision_rest_flattened(): +def test_provision_deployment_group_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.Revision() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ProvisionDeploymentGroupRequest() - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request.name = "name_value" - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.provision_deployment_group(request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - client.get_revision(**mock_args) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_provision_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ProvisionDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.provision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_provision_deployment_group_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.provision_deployment_group( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_get_revision_rest_flattened_error(transport: str = "rest"): +def test_provision_deployment_group_flattened_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_revision( - config.GetRevisionRequest(), + client.provision_deployment_group( + config.ProvisionDeploymentGroupRequest(), name="name_value", ) -def test_get_resource_rest_use_cached_wrapped_rpc(): +@pytest.mark.asyncio +async def test_provision_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.provision_deployment_group( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_provision_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.provision_deployment_group( + config.ProvisionDeploymentGroupRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeprovisionDeploymentGroupRequest, + dict, + ], +) +def test_deprovision_deployment_group(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.deprovision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.DeprovisionDeploymentGroupRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_deprovision_deployment_group_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.deprovision_deployment_group(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.DeprovisionDeploymentGroupRequest( + name="name_value", + ) + + +def test_deprovision_deployment_group_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -13942,177 +14838,351 @@ def test_get_resource_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_resource in client._transport._wrapped_methods + assert ( + client._transport.deprovision_deployment_group + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.deprovision_deployment_group + ] = mock_rpc request = {} - client.get_resource(request) + client.deprovision_deployment_group(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource(request) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deprovision_deployment_group(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): - transport_class = transports.ConfigRestTransport +@pytest.mark.asyncio +async def test_deprovision_deployment_group_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - request_init = {} - request_init["name"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # verify fields with default values are dropped + # Ensure method has been cached + assert ( + client._client._transport.deprovision_deployment_group + in client._client._transport._wrapped_methods + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.deprovision_deployment_group + ] = mock_rpc - # verify required fields with default values are now present + request = {} + await client.deprovision_deployment_group(request) - jsonified_request["name"] = "name_value" + # Establish that 
the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).get_resource._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. + # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + await client.deprovision_deployment_group(request) - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_deprovision_deployment_group_async( + transport: str = "grpc_asyncio", + request_type=config.DeprovisionDeploymentGroupRequest, +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = config.Resource() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - response_value = Response() - response_value.status_code = 200 + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.deprovision_deployment_group(request) - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.DeprovisionDeploymentGroupRequest() + assert args[0] == request - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Establish that the response is the type that we expect. 
+ assert isinstance(response, future.Future) - response = client.get_resource(request) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params +@pytest.mark.asyncio +async def test_deprovision_deployment_group_async_from_dict(): + await test_deprovision_deployment_group_async(request_type=dict) -def test_get_resource_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_deprovision_deployment_group_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), ) - unset_fields = transport.get_resource._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.DeprovisionDeploymentGroupRequest() + request.name = "name_value" -def test_get_resource_rest_flattened(): + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.deprovision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_deprovision_deployment_group_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = config.DeprovisionDeploymentGroupRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.deprovision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_deprovision_deployment_group_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.Resource() + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.deprovision_deployment_group( + name="name_value", + ) - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - # get truthy value for each flattened field - mock_args = dict( + +def test_deprovision_deployment_group_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.deprovision_deployment_group( + config.DeprovisionDeploymentGroupRequest(), name="name_value", ) - mock_args.update(sample_request) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource(**mock_args) +@pytest.mark.asyncio +async def test_deprovision_deployment_group_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.deprovision_deployment_group( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" - % client.transport._host, - args[1], + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_deprovision_deployment_group_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.deprovision_deployment_group( + config.DeprovisionDeploymentGroupRequest(), + name="name_value", ) -def test_get_resource_rest_flattened_error(transport: str = "rest"): +@pytest.mark.parametrize( + "request_type", + [ + config.GetDeploymentGroupRevisionRequest, + dict, + ], +) +def test_get_deployment_group_revision(request_type, transport: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_resource( - config.GetResourceRequest(), + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroupRevision( name="name_value", + alternative_ids=["alternative_ids_value"], ) + response = client.get_deployment_group_revision(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.GetDeploymentGroupRevisionRequest() + assert args[0] == request -def test_list_resources_rest_use_cached_wrapped_rpc(): + # Establish that the response is the type that we expect. + assert isinstance(response, config.DeploymentGroupRevision) + assert response.name == "name_value" + assert response.alternative_ids == ["alternative_ids_value"] + + +def test_get_deployment_group_revision_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_deployment_group_revision(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.GetDeploymentGroupRevisionRequest( + name="name_value", + ) + + +def test_get_deployment_group_revision_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14120,259 +15190,348 @@ def test_list_resources_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.list_resources in client._transport._wrapped_methods + assert ( + client._transport.get_deployment_group_revision + in client._transport._wrapped_methods + ) # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.list_resources] = mock_rpc - + client._transport._wrapped_methods[ + client._transport.get_deployment_group_revision + ] = mock_rpc request = {} - client.list_resources(request) + client.get_deployment_group_revision(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.list_resources(request) + client.get_deployment_group_revision(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) +@pytest.mark.asyncio +async def test_get_deployment_group_revision_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) - # verify fields with default values are dropped + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Ensure method has been cached + assert ( + client._client._transport.get_deployment_group_revision + in client._client._transport._wrapped_methods + ) - # verify required fields with default values are now present + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_deployment_group_revision + ] = mock_rpc - jsonified_request["parent"] = "parent_value" + request = {} + await client.get_deployment_group_revision(request) - unset_fields 
= transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).list_resources._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_deployment_group_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_deployment_group_revision_async( + transport: str = "grpc_asyncio", + request_type=config.GetDeploymentGroupRevisionRequest, +): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - jsonified_request.update(unset_fields) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroupRevision( + name="name_value", + alternative_ids=["alternative_ids_value"], + ) + ) + response = await client.get_deployment_group_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.GetDeploymentGroupRevisionRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.DeploymentGroupRevision) + assert response.name == "name_value" + assert response.alternative_ids == ["alternative_ids_value"] + + +@pytest.mark.asyncio +async def test_get_deployment_group_revision_async_from_dict(): + await test_get_deployment_group_revision_async(request_type=dict) + +def test_get_deployment_group_revision_field_headers(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "get", - "query_params": pb_request, - } - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetDeploymentGroupRevisionRequest() - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + request.name = "name_value" - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + call.return_value = config.DeploymentGroupRevision() + client.get_deployment_group_revision(request) - response = client.list_resources(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] -def test_list_resources_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +@pytest.mark.asyncio +async def test_get_deployment_group_revision_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - unset_fields = transport.list_resources._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.GetDeploymentGroupRevisionRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroupRevision() ) - & set(("parent",)) - ) + await client.get_deployment_group_revision(request) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request -def test_list_resources_rest_flattened(): + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_deployment_group_revision_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroupRevision() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.get_deployment_group_revision( + name="name_value", ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - client.list_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" - % client.transport._host, - args[1], - ) + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val -def test_list_resources_rest_flattened_error(transport: str = "rest"): +def test_get_deployment_group_revision_flattened_error(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resources( - config.ListResourcesRequest(), - parent="parent_value", + client.get_deployment_group_revision( + config.GetDeploymentGroupRevisionRequest(), + name="name_value", ) -def test_list_resources_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +@pytest.mark.asyncio +async def test_get_deployment_group_revision_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - config.Resource(), - ], - next_page_token="abc", - ), - config.ListResourcesResponse( - resources=[], - next_page_token="def", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - ], - next_page_token="ghi", - ), - config.ListResourcesResponse( - resources=[ - config.Resource(), - config.Resource(), - ], - ), + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.DeploymentGroupRevision() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.DeploymentGroupRevision() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_deployment_group_revision( + name="name_value", ) - # Two responses for two calls - response = response + response - # Wrap the values into proper Response objs - response = tuple(config.ListResourcesResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } - pager = client.list_resources(request=sample_request) +@pytest.mark.asyncio +async def test_get_deployment_group_revision_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Resource) for i in results) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_deployment_group_revision( + config.GetDeploymentGroupRevisionRequest(), + name="name_value", + ) - pages = list(client.list_resources(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token +@pytest.mark.parametrize( + "request_type", + [ + config.ListDeploymentGroupRevisionsRequest, + dict, + ], +) +def test_list_deployment_group_revisions(request_type, transport: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) -def test_export_deployment_statefile_rest_use_cached_wrapped_rpc(): + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = config.ListDeploymentGroupRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + response = client.list_deployment_group_revisions(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = config.ListDeploymentGroupRevisionsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentGroupRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +def test_list_deployment_group_revisions_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = config.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_deployment_group_revisions(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == config.ListDeploymentGroupRevisionsRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_deployment_group_revisions_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport="grpc", ) # Should wrap all calls on client creation @@ -14381,7 +15540,7 @@ def test_export_deployment_statefile_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.export_deployment_statefile + client._transport.list_deployment_group_revisions in client._transport._wrapped_methods ) @@ -14391,114 +15550,31 @@ def test_export_deployment_statefile_rest_use_cached_wrapped_rpc(): "foo" # operation_request.operation in compute client(s) expect a string. ) client._transport._wrapped_methods[ - client._transport.export_deployment_statefile + client._transport.list_deployment_group_revisions ] = mock_rpc - request = {} - client.export_deployment_statefile(request) + client.list_deployment_group_revisions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.export_deployment_statefile(request) + client.list_deployment_group_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_deployment_statefile_rest_required_fields( - request_type=config.ExportDeploymentStatefileRequest, +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", ): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_deployment_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = config.Statefile() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_deployment_statefile(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_export_deployment_statefile_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_export_revision_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + with 
mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) # Should wrap all calls on client creation @@ -14507,315 +15583,430 @@ def test_export_revision_statefile_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.export_revision_statefile - in client._transport._wrapped_methods + client._client._transport.list_deployment_group_revisions + in client._client._transport._wrapped_methods ) # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[ - client._transport.export_revision_statefile + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_deployment_group_revisions ] = mock_rpc request = {} - client.export_revision_statefile(request) + await client.list_deployment_group_revisions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.export_revision_statefile(request) + await client.list_deployment_group_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_revision_statefile_rest_required_fields( - request_type=config.ExportRevisionStatefileRequest, +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_async( + transport: str = "grpc_asyncio", + request_type=config.ListDeploymentGroupRevisionsRequest, ): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, ) - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_revision_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + response = await client.list_deployment_group_revisions(request) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = config.ListDeploymentGroupRevisionsRequest() + assert args[0] == request - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListDeploymentGroupRevisionsAsyncPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] - # Designate an appropriate value for the returned response. - return_value = config.Statefile() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. 
- pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - response_value = Response() - response_value.status_code = 200 +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_async_from_dict(): + await test_list_deployment_group_revisions_async(request_type=dict) - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} +def test_list_deployment_group_revisions_field_headers(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - response = client.export_revision_statefile(request) + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListDeploymentGroupRevisionsRequest() - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + request.parent = "parent_value" + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + call.return_value = config.ListDeploymentGroupRevisionsResponse() + client.list_deployment_group_revisions(request) -def test_export_revision_statefile_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request - unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_import_statefile_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_field_headers_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = config.ListDeploymentGroupRevisionsRequest() - # Ensure method has been cached - assert client._transport.import_statefile in client._transport._wrapped_methods + request.parent = "parent_value" - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.import_statefile] = ( - mock_rpc + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupRevisionsResponse() ) - - request = {} - client.import_statefile(request) + await client.list_deployment_group_revisions(request) # Establish that the underlying gRPC stub method was called. - assert mock_rpc.call_count == 1 - - client.import_statefile(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] -def test_import_statefile_rest_required_fields( - request_type=config.ImportStatefileRequest, -): - transport_class = transports.ConfigRestTransport - request_init = {} - request_init["parent"] = "" - request_init["lock_id"] = 0 - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) +def test_list_deployment_group_revisions_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), ) - # verify fields with default values are dropped + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentGroupRevisionsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_deployment_group_revisions( + parent="parent_value", + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" - jsonified_request["lockId"] = 725 +def test_list_deployment_group_revisions_flattened_error(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + ) - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).import_statefile._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployment_group_revisions( + config.ListDeploymentGroupRevisionsRequest(), + parent="parent_value", + ) - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_flattened_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - request = request_type(**request_init) - # Designate an appropriate value for the returned response. - return_value = config.Statefile() - # Mock the http request call within the method and fake a response. 
- with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. - with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = config.ListDeploymentGroupRevisionsResponse() - response_value = Response() - response_value.status_code = 200 + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentGroupRevisionsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_deployment_group_revisions( + parent="parent_value", + ) - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_statefile(request) +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_flattened_error_async(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_deployment_group_revisions( + config.ListDeploymentGroupRevisionsRequest(), + parent="parent_value", + ) -def test_import_statefile_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials +def test_list_deployment_group_revisions_pager(transport_name: str = "grpc"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, ) - unset_fields = transport.import_statefile._get_unset_required_fields({}) - assert set(unset_fields) == ( - set(()) - & set( - ( - "parent", - "lockId", - ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[], + next_page_token="def", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + ), + RuntimeError, ) - ) + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_deployment_group_revisions( + request={}, retry=retry, timeout=timeout + ) -def test_import_statefile_rest_flattened(): + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.DeploymentGroupRevision) for i in results) + + +def test_list_deployment_group_revisions_pages(transport_name: str = "grpc"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="rest", + transport=transport_name, ) - # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. 
- return_value = config.Statefile() - - # get arguments that satisfy an http rule for this method - sample_request = { - "parent": "projects/sample1/locations/sample2/deployments/sample3" - } - - # get truthy value for each flattened field - mock_args = dict( - parent="parent_value", - lock_id=725, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[], + next_page_token="def", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + ), + RuntimeError, ) - mock_args.update(sample_request) + pages = list(client.list_deployment_group_revisions(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_statefile(**mock_args) +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_async_pager(): + client = 
ConfigAsyncClient( + credentials=async_anonymous_credentials(), + ) - # Establish that the underlying call was made with the expected - # request object values. - assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" - % client.transport._host, - args[1], + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[], + next_page_token="def", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_deployment_group_revisions( + request={}, ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + assert len(responses) == 6 + assert all(isinstance(i, config.DeploymentGroupRevision) for i in responses) -def test_import_statefile_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, + +@pytest.mark.asyncio +async def test_list_deployment_group_revisions_async_pages(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), ) - # Attempting to 
call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.import_statefile( - config.ImportStatefileRequest(), - parent="parent_value", - lock_id=725, + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[], + next_page_token="def", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + ), + RuntimeError, ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_deployment_group_revisions(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token -def test_delete_statefile_rest_use_cached_wrapped_rpc(): +def test_list_deployments_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -14829,38 +16020,37 @@ def test_delete_statefile_rest_use_cached_wrapped_rpc(): 
wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_statefile in client._transport._wrapped_methods + assert client._transport.list_deployments in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_statefile] = ( + client._transport._wrapped_methods[client._transport.list_deployments] = ( mock_rpc ) request = {} - client.delete_statefile(request) + client.list_deployments(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.delete_statefile(request) + client.list_deployments(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_statefile_rest_required_fields( - request_type=config.DeleteStatefileRequest, +def test_list_deployments_rest_required_fields( + request_type=config.ListDeploymentsRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" - request_init["lock_id"] = 0 + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -14871,24 +16061,30 @@ def test_delete_statefile_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_statefile._get_unset_required_fields(jsonified_request) + ).list_deployments._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - 
).delete_statefile._get_unset_required_fields(jsonified_request) + ).list_deployments._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -14897,7 +16093,7 @@ def test_delete_statefile_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = None + return_value = config.ListDeploymentsResponse() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -14909,45 +16105,49 @@ def test_delete_statefile_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_statefile(request) + response = client.list_deployments(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_statefile_rest_unset_required_fields(): +def test_list_deployments_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_statefile._get_unset_required_fields({}) + unset_fields = transport.list_deployments._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) - & set( + set( ( - "name", - "lockId", + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) + & set(("parent",)) ) -def test_delete_statefile_rest_flattened(): +def test_list_deployments_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -14956,41 +16156,41 @@ def test_delete_statefile_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = config.ListDeploymentsResponse() # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - json_return_value = "" + # Convert return value to protobuf type + return_value = config.ListDeploymentsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_statefile(**mock_args) + client.list_deployments(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" + "%s/v1/{parent=projects/*/locations/*}/deployments" % client.transport._host, args[1], ) -def test_delete_statefile_rest_flattened_error(transport: str = "rest"): +def test_list_deployments_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -14999,14 +16199,75 @@ def test_delete_statefile_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_statefile( - config.DeleteStatefileRequest(), - name="name_value", + client.list_deployments( + config.ListDeploymentsRequest(), + parent="parent_value", ) -def test_lock_deployment_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, +def test_list_deployments_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + config.Deployment(), + ], + next_page_token="abc", + ), + config.ListDeploymentsResponse( + deployments=[], + next_page_token="def", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + ], + next_page_token="ghi", + ), + config.ListDeploymentsResponse( + deployments=[ + config.Deployment(), + config.Deployment(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListDeploymentsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployments(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Deployment) for i in results) + + pages = 
list(client.list_deployments(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( @@ -15019,35 +16280,29 @@ def test_lock_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.lock_deployment in client._transport._wrapped_methods + assert client._transport.get_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.lock_deployment] = mock_rpc + client._transport._wrapped_methods[client._transport.get_deployment] = mock_rpc request = {} - client.lock_deployment(request) + client.get_deployment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.lock_deployment(request) + client.get_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_lock_deployment_rest_required_fields( - request_type=config.LockDeploymentRequest, -): +def test_get_deployment_rest_required_fields(request_type=config.GetDeploymentRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -15062,7 +16317,7 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).get_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -15071,7 +16326,7 @@ def test_lock_deployment_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).lock_deployment._get_unset_required_fields(jsonified_request) + ).get_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -15085,7 +16340,7 @@ def test_lock_deployment_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Deployment() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15097,37 +16352,39 @@ def test_lock_deployment_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "get", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lock_deployment(request) + response = client.get_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_lock_deployment_rest_unset_required_fields(): +def test_get_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.lock_deployment._get_unset_required_fields({}) + unset_fields = transport.get_deployment._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_lock_deployment_rest_flattened(): +def test_get_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15136,7 +16393,7 @@ def test_lock_deployment_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Deployment() # get arguments that satisfy an http rule for this method sample_request = { @@ -15152,25 +16409,27 @@ def test_lock_deployment_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Deployment.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lock_deployment(**mock_args) + client.get_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" + "%s/v1/{name=projects/*/locations/*/deployments/*}" % client.transport._host, args[1], ) -def test_lock_deployment_rest_flattened_error(transport: str = "rest"): +def test_get_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15179,13 +16438,13 @@ def test_lock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.lock_deployment( - config.LockDeploymentRequest(), + client.get_deployment( + config.GetDeploymentRequest(), name="name_value", ) -def test_unlock_deployment_rest_use_cached_wrapped_rpc(): +def test_create_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15199,19 +16458,19 @@ def test_unlock_deployment_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.unlock_deployment in client._transport._wrapped_methods + assert client._transport.create_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.unlock_deployment] = ( + client._transport._wrapped_methods[client._transport.create_deployment] = ( mock_rpc ) request = {} - client.unlock_deployment(request) + client.create_deployment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -15220,21 +16479,21 @@ def test_unlock_deployment_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.unlock_deployment(request) + client.create_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_unlock_deployment_rest_required_fields( - request_type=config.UnlockDeploymentRequest, +def test_create_deployment_rest_required_fields( + request_type=config.CreateDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" - request_init["lock_id"] = 0 + request_init["parent"] = "" + request_init["deployment_id"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15242,27 +16501,37 @@ def test_unlock_deployment_rest_required_fields( ) # verify fields with default values are dropped + assert "deploymentId" not in jsonified_request unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).create_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == request_init["deployment_id"] - jsonified_request["name"] = "name_value" - jsonified_request["lockId"] = 725 + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentId"] = "deployment_id_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).unlock_deployment._get_unset_required_fields(jsonified_request) + ).create_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "deployment_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" - assert "lockId" in jsonified_request - assert jsonified_request["lockId"] == 725 + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentId" in jsonified_request + assert jsonified_request["deploymentId"] == "deployment_id_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15297,31 +16566,43 @@ def test_unlock_deployment_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.unlock_deployment(request) + response = client.create_deployment(request) - expected_params = [("$alt", "json;enum-encoding=int")] + expected_params = [ + ( + "deploymentId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_unlock_deployment_rest_unset_required_fields(): +def test_create_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + unset_fields = transport.create_deployment._get_unset_required_fields({}) assert set(unset_fields) == ( - set(()) + set( + ( + "deploymentId", + "requestId", + ) + ) & set( ( - "name", - "lockId", + "parent", + "deploymentId", + "deployment", ) ) ) -def test_unlock_deployment_rest_flattened(): +def test_create_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15333,14 +16614,17 @@ def test_unlock_deployment_rest_flattened(): return_value = 
operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - name="name_value", - lock_id=725, + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", ) mock_args.update(sample_request) @@ -15352,20 +16636,20 @@ def test_unlock_deployment_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.unlock_deployment(**mock_args) + client.create_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" + "%s/v1/{parent=projects/*/locations/*}/deployments" % client.transport._host, args[1], ) -def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): +def test_create_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15374,14 +16658,19 @@ def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.unlock_deployment( - config.UnlockDeploymentRequest(), - name="name_value", - lock_id=725, + client.create_deployment( + config.CreateDeploymentRequest(), + parent="parent_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + deployment_id="deployment_id_value", ) -def test_export_lock_info_rest_use_cached_wrapped_rpc(): +def test_update_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15395,37 +16684,40 @@ def test_export_lock_info_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.export_lock_info in client._transport._wrapped_methods + assert client._transport.update_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.export_lock_info] = ( + client._transport._wrapped_methods[client._transport.update_deployment] = ( mock_rpc ) request = {} - client.export_lock_info(request) + client.update_deployment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.export_lock_info(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_export_lock_info_rest_required_fields( - request_type=config.ExportLockInfoRequest, +def test_update_deployment_rest_required_fields( + request_type=config.UpdateDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15436,21 +16728,24 @@ def test_export_lock_info_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).update_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" - unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).export_lock_info._get_unset_required_fields(jsonified_request) + ).update_deployment._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15459,7 +16754,7 @@ def test_export_lock_info_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15471,39 +16766,45 @@ def test_export_lock_info_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "patch", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_lock_info(request) + response = client.update_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_export_lock_info_rest_unset_required_fields(): +def test_update_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = 
transport.export_lock_info._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.update_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("deployment",)) + ) -def test_export_lock_info_rest_flattened(): +def test_update_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15512,43 +16813,48 @@ def test_export_lock_info_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.LockInfo() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/deployments/sample3" + "deployment": { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } } # get truthy value for each flattened field mock_args = dict( - name="name_value", + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_lock_info(**mock_args) + client.update_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" + "%s/v1/{deployment.name=projects/*/locations/*/deployments/*}" % client.transport._host, args[1], ) -def test_export_lock_info_rest_flattened_error(transport: str = "rest"): +def test_update_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15557,13 +16863,18 @@ def test_export_lock_info_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.export_lock_info( - config.ExportLockInfoRequest(), - name="name_value", + client.update_deployment( + config.UpdateDeploymentRequest(), + deployment=config.Deployment( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), ) -def test_create_preview_rest_use_cached_wrapped_rpc(): +def test_delete_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15577,17 +16888,19 @@ def test_create_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.create_preview in client._transport._wrapped_methods + assert client._transport.delete_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.create_preview] = mock_rpc + client._transport._wrapped_methods[client._transport.delete_deployment] = ( + mock_rpc + ) request = {} - client.create_preview(request) + client.delete_deployment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 @@ -15596,18 +16909,20 @@ def test_create_preview_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.create_preview(request) + client.delete_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): +def test_delete_deployment_rest_required_fields( + request_type=config.DeleteDeploymentRequest, +): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15618,28 +16933,29 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_preview._get_unset_required_fields(jsonified_request) + ).delete_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).create_preview._get_unset_required_fields(jsonified_request) + ).delete_deployment._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
assert not set(unset_fields) - set( ( - "preview_id", + "delete_policy", + "force", "request_id", ) ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15660,10 +16976,9 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "post", + "method": "delete", "query_params": pb_request, } - transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() @@ -15674,36 +16989,32 @@ def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRe req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_preview(request) + response = client.delete_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_create_preview_rest_unset_required_fields(): +def test_delete_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.create_preview._get_unset_required_fields({}) + unset_fields = transport.delete_deployment._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( - "previewId", + "deletePolicy", + "force", "requestId", ) ) - & set( - ( - "parent", - "preview", - ) - ) + & set(("name",)) ) -def test_create_preview_rest_flattened(): +def test_delete_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15715,16 
+17026,13 @@ def test_create_preview_rest_flattened(): return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", - preview=config.Preview( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), + name="name_value", ) mock_args.update(sample_request) @@ -15736,19 +17044,20 @@ def test_create_preview_rest_flattened(): req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_preview(**mock_args) + client.delete_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/deployments/*}" + % client.transport._host, args[1], ) -def test_create_preview_rest_flattened_error(transport: str = "rest"): +def test_delete_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15757,18 +17066,13 @@ def test_create_preview_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.create_preview( - config.CreatePreviewRequest(), - parent="parent_value", - preview=config.Preview( - terraform_blueprint=config.TerraformBlueprint( - gcs_source="gcs_source_value" - ) - ), + client.delete_deployment( + config.DeleteDeploymentRequest(), + name="name_value", ) -def test_get_preview_rest_use_cached_wrapped_rpc(): +def test_list_revisions_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -15782,33 +17086,33 @@ def test_get_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.get_preview in client._transport._wrapped_methods + assert client._transport.list_revisions in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_preview] = mock_rpc + client._transport._wrapped_methods[client._transport.list_revisions] = mock_rpc request = {} - client.get_preview(request) + client.list_revisions(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_preview(request) + client.list_revisions(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): +def test_list_revisions_rest_required_fields(request_type=config.ListRevisionsRequest): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15819,21 +17123,30 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_preview._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_preview._get_unset_required_fields(jsonified_request) + ).list_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -15842,7 +17155,7 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.Preview() + return_value = config.ListRevisionsResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -15863,30 +17176,40 @@ def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest) response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Preview.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_preview(request) + response = client.list_revisions(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_preview_rest_unset_required_fields(): +def test_list_revisions_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_preview._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & 
set(("name",))) + unset_fields = transport.list_revisions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -def test_get_preview_rest_flattened(): +def test_list_revisions_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -15895,14 +17218,16 @@ def test_get_preview_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.Preview() + return_value = config.ListRevisionsResponse() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } # get truthy value for each flattened field mock_args = dict( - name="name_value", + parent="parent_value", ) mock_args.update(sample_request) @@ -15910,25 +17235,26 @@ def test_get_preview_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Preview.pb(return_value) + return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_preview(**mock_args) + client.list_revisions(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, + "%s/v1/{parent=projects/*/locations/*/deployments/*}/revisions" + % client.transport._host, args[1], ) -def test_get_preview_rest_flattened_error(transport: str = "rest"): +def test_list_revisions_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -15937,53 +17263,116 @@ def test_get_preview_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_preview( - config.GetPreviewRequest(), - name="name_value", - ) - - -def test_list_previews_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert client._transport.list_previews in client._transport._wrapped_methods - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. + client.list_revisions( + config.ListRevisionsRequest(), + parent="parent_value", ) - client._transport._wrapped_methods[client._transport.list_previews] = mock_rpc - - request = {} - client.list_previews(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - client.list_previews(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 +def test_list_revisions_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + config.Revision(), + ], + next_page_token="abc", + ), + config.ListRevisionsResponse( + revisions=[], + next_page_token="def", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + ], + next_page_token="ghi", + ), + config.ListRevisionsResponse( + revisions=[ + config.Revision(), + config.Revision(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListRevisionsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3" + } + + pager = client.list_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Revision) for i in results) + + pages = list(client.list_revisions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + 
+def test_get_revision_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_revision in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_revision] = mock_rpc + + request = {} + client.get_revision(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): - transport_class = transports.ConfigRestTransport + +def test_get_revision_rest_required_fields(request_type=config.GetRevisionRequest): + transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -15994,30 +17383,21 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_previews._get_unset_required_fields(jsonified_request) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required 
fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_previews._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).get_revision._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16026,7 +17406,7 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListPreviewsResponse() + return_value = config.Revision() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16047,40 +17427,30 @@ def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequ response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListPreviewsResponse.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_previews(request) + response = client.get_revision(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_previews_rest_unset_required_fields(): +def test_get_revision_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_previews._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.get_revision._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_previews_rest_flattened(): +def test_get_revision_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16089,14 +17459,16 @@ def test_list_previews_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListPreviewsResponse() + return_value = config.Revision() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) @@ -16104,25 +17476,26 @@ def test_list_previews_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListPreviewsResponse.pb(return_value) + return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_previews(**mock_args) + client.get_revision(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*}" + % client.transport._host, args[1], ) -def test_list_previews_rest_flattened_error(transport: str = "rest"): +def test_get_revision_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16131,74 +17504,13 @@ def test_list_previews_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_previews( - config.ListPreviewsRequest(), - parent="parent_value", - ) - - -def test_list_previews_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListPreviewsResponse( - previews=[ - config.Preview(), - config.Preview(), - config.Preview(), - ], - next_page_token="abc", - ), - config.ListPreviewsResponse( - previews=[], - next_page_token="def", - ), - config.ListPreviewsResponse( - previews=[ - config.Preview(), - ], - next_page_token="ghi", - ), - config.ListPreviewsResponse( - previews=[ - config.Preview(), - config.Preview(), - ], - ), + client.get_revision( + config.GetRevisionRequest(), + name="name_value", ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = {"parent": "projects/sample1/locations/sample2"} - - pager = client.list_previews(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.Preview) for i in results) - - pages = list(client.list_previews(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token -def test_delete_preview_rest_use_cached_wrapped_rpc(): 
+def test_get_resource_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16212,33 +17524,29 @@ def test_delete_preview_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert client._transport.delete_preview in client._transport._wrapped_methods + assert client._transport.get_resource in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.delete_preview] = mock_rpc + client._transport._wrapped_methods[client._transport.get_resource] = mock_rpc request = {} - client.delete_preview(request) + client.get_resource(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - # Operation methods build a cached wrapper on first rpc call - # subsequent calls should use the cached wrapper - wrapper_fn.reset_mock() - - client.delete_preview(request) + client.get_resource(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): +def test_get_resource_rest_required_fields(request_type=config.GetResourceRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -16253,7 +17561,7 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_preview._get_unset_required_fields(jsonified_request) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16262,9 +17570,7 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).delete_preview._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. - assert not set(unset_fields) - set(("request_id",)) + ).get_resource._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -16278,7 +17584,7 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16290,36 +17596,39 @@ def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRe pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "delete", + "method": "get", "query_params": pb_request, } transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_preview(request) + response = client.get_resource(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_delete_preview_rest_unset_required_fields(): +def test_get_resource_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.delete_preview._get_unset_required_fields({}) - assert set(unset_fields) == (set(("requestId",)) & set(("name",))) + unset_fields = transport.get_resource._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_delete_preview_rest_flattened(): +def test_get_resource_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16328,10 +17637,12 @@ def test_delete_preview_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Resource() # get arguments that satisfy an http rule for this method - sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} + sample_request = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } # get truthy value for each flattened field mock_args = dict( @@ -16342,24 +17653,27 @@ def test_delete_preview_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_preview(**mock_args) + client.get_resource(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, + "%s/v1/{name=projects/*/locations/*/deployments/*/revisions/*/resources/*}" + % client.transport._host, args[1], ) -def test_delete_preview_rest_flattened_error(transport: str = "rest"): +def test_get_resource_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16368,139 +17682,13 @@ def test_delete_preview_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.delete_preview( - config.DeletePreviewRequest(), + client.get_resource( + config.GetResourceRequest(), name="name_value", ) -def test_export_preview_result_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - - # Should wrap all calls on client creation - assert wrapper_fn.call_count > 0 - wrapper_fn.reset_mock() - - # Ensure method has been cached - assert ( - client._transport.export_preview_result - in client._transport._wrapped_methods - ) - - # Replace cached wrapped function with mock - mock_rpc = mock.Mock() - mock_rpc.return_value.name = ( - "foo" # operation_request.operation in compute client(s) expect a string. - ) - client._transport._wrapped_methods[client._transport.export_preview_result] = ( - mock_rpc - ) - - request = {} - client.export_preview_result(request) - - # Establish that the underlying gRPC stub method was called. 
- assert mock_rpc.call_count == 1 - - client.export_preview_result(request) - - # Establish that a new wrapper was not created for this call - assert wrapper_fn.call_count == 0 - assert mock_rpc.call_count == 2 - - -def test_export_preview_result_rest_required_fields( - request_type=config.ExportPreviewResultRequest, -): - transport_class = transports.ConfigRestTransport - - request_init = {} - request_init["parent"] = "" - request = request_type(**request_init) - pb_request = request_type.pb(request) - jsonified_request = json.loads( - json_format.MessageToJson(pb_request, use_integers_for_enums=False) - ) - - # verify fields with default values are dropped - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_preview_result._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with default values are now present - - jsonified_request["parent"] = "parent_value" - - unset_fields = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ).export_preview_result._get_unset_required_fields(jsonified_request) - jsonified_request.update(unset_fields) - - # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" - - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) - request = request_type(**request_init) - - # Designate an appropriate value for the returned response. - return_value = config.ExportPreviewResultResponse() - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # We need to mock transcode() because providing default values - # for required fields will fail the real version if the http_options - # expect actual values for those fields. 
- with mock.patch.object(path_template, "transcode") as transcode: - # A uri without fields and an empty body will force all the - # request fields to show up in the query_params. - pb_request = request_type.pb(request) - transcode_result = { - "uri": "v1/sample_method", - "method": "post", - "query_params": pb_request, - } - transcode_result["body"] = pb_request - transcode.return_value = transcode_result - - response_value = Response() - response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ExportPreviewResultResponse.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - - response = client.export_preview_result(request) - - expected_params = [("$alt", "json;enum-encoding=int")] - actual_params = req.call_args.kwargs["params"] - assert expected_params == actual_params - - -def test_export_preview_result_rest_unset_required_fields(): - transport = transports.ConfigRestTransport( - credentials=ga_credentials.AnonymousCredentials - ) - - unset_fields = transport.export_preview_result._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("parent",))) - - -def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): +def test_list_resources_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16514,36 +17702,29 @@ def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_terraform_versions - in client._transport._wrapped_methods - ) + assert client._transport.list_resources in client._transport._wrapped_methods # Replace 
cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.list_terraform_versions - ] = mock_rpc + client._transport._wrapped_methods[client._transport.list_resources] = mock_rpc request = {} - client.list_terraform_versions(request) + client.list_resources(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_terraform_versions(request) + client.list_resources(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_terraform_versions_rest_required_fields( - request_type=config.ListTerraformVersionsRequest, -): +def test_list_resources_rest_required_fields(request_type=config.ListResourcesRequest): transport_class = transports.ConfigRestTransport request_init = {} @@ -16558,7 +17739,7 @@ def test_list_terraform_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_terraform_versions._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -16567,7 +17748,7 @@ def test_list_terraform_versions_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_terraform_versions._get_unset_required_fields(jsonified_request) + ).list_resources._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( @@ -16590,7 +17771,7 @@ def test_list_terraform_versions_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = config.ListTerraformVersionsResponse() + return_value = config.ListResourcesResponse() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16611,26 +17792,26 @@ def test_list_terraform_versions_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListTerraformVersionsResponse.pb(return_value) + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_terraform_versions(request) + response = client.list_resources(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_list_terraform_versions_rest_unset_required_fields(): +def test_list_resources_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_terraform_versions._get_unset_required_fields({}) + unset_fields = transport.list_resources._get_unset_required_fields({}) assert set(unset_fields) == ( set( ( @@ -16644,7 +17825,7 @@ def test_list_terraform_versions_rest_unset_required_fields(): ) -def test_list_terraform_versions_rest_flattened(): +def test_list_resources_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -16653,10 +17834,12 @@ def test_list_terraform_versions_rest_flattened(): # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListTerraformVersionsResponse() + return_value = config.ListResourcesResponse() # get arguments that satisfy an http rule for this method - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } # get truthy value for each flattened field mock_args = dict( @@ -16668,26 +17851,26 @@ def test_list_terraform_versions_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListTerraformVersionsResponse.pb(return_value) + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_terraform_versions(**mock_args) + client.list_resources(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*}/terraformVersions" + "%s/v1/{parent=projects/*/locations/*/deployments/*/revisions/*}/resources" % client.transport._host, args[1], ) -def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): +def test_list_resources_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16696,13 +17879,13 @@ def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_terraform_versions( - config.ListTerraformVersionsRequest(), + client.list_resources( + config.ListResourcesRequest(), parent="parent_value", ) -def test_list_terraform_versions_rest_pager(transport: str = "rest"): +def test_list_resources_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -16714,28 +17897,28 @@ def test_list_terraform_versions_rest_pager(transport: str = "rest"): # with mock.patch.object(path_template, 'transcode') as transcode: # Set the response as a series of pages response = ( - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), - config.TerraformVersion(), - config.TerraformVersion(), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), + config.Resource(), ], next_page_token="abc", ), - config.ListTerraformVersionsResponse( - terraform_versions=[], + config.ListResourcesResponse( + resources=[], next_page_token="def", ), - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), + config.ListResourcesResponse( + resources=[ + config.Resource(), ], next_page_token="ghi", ), - config.ListTerraformVersionsResponse( - terraform_versions=[ - config.TerraformVersion(), - config.TerraformVersion(), + config.ListResourcesResponse( + resources=[ + config.Resource(), + config.Resource(), ], ), ) @@ -16743,29 +17926,29 @@ def test_list_terraform_versions_rest_pager(transport: str = "rest"): response = response + response # Wrap the values into proper Response objs - response = tuple( - config.ListTerraformVersionsResponse.to_json(x) for x in response - ) + response = tuple(config.ListResourcesResponse.to_json(x) for x in response) return_values = tuple(Response() for i in response) for return_val, response_val in zip(return_values, response): return_val._content = response_val.encode("UTF-8") return_val.status_code = 
200 req.side_effect = return_values - sample_request = {"parent": "projects/sample1/locations/sample2"} + sample_request = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } - pager = client.list_terraform_versions(request=sample_request) + pager = client.list_resources(request=sample_request) results = list(pager) assert len(results) == 6 - assert all(isinstance(i, config.TerraformVersion) for i in results) + assert all(isinstance(i, config.Resource) for i in results) - pages = list(client.list_terraform_versions(request=sample_request).pages) + pages = list(client.list_resources(request=sample_request).pages) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -def test_get_terraform_version_rest_use_cached_wrapped_rpc(): +def test_export_deployment_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16780,7 +17963,7 @@ def test_get_terraform_version_rest_use_cached_wrapped_rpc(): # Ensure method has been cached assert ( - client._transport.get_terraform_version + client._transport.export_deployment_statefile in client._transport._wrapped_methods ) @@ -16789,30 +17972,30 @@ def test_get_terraform_version_rest_use_cached_wrapped_rpc(): mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_terraform_version] = ( - mock_rpc - ) + client._transport._wrapped_methods[ + client._transport.export_deployment_statefile + ] = mock_rpc request = {} - client.get_terraform_version(request) + client.export_deployment_statefile(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_terraform_version(request) + client.export_deployment_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_terraform_version_rest_required_fields( - request_type=config.GetTerraformVersionRequest, +def test_export_deployment_statefile_rest_required_fields( + request_type=config.ExportDeploymentStatefileRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["name"] = "" + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -16823,21 +18006,21 @@ def test_get_terraform_version_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_terraform_version._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["name"] = "name_value" + jsonified_request["parent"] = "parent_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_terraform_version._get_unset_required_fields(jsonified_request) + ).export_deployment_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "name" in jsonified_request - assert jsonified_request["name"] == "name_value" + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -16846,7 +18029,7 @@ def test_get_terraform_version_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. 
- return_value = config.TerraformVersion() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -16858,99 +18041,166 @@ def test_get_terraform_version_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.TerraformVersion.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_terraform_version(request) + response = client.export_deployment_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_terraform_version_rest_unset_required_fields(): +def test_export_deployment_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_terraform_version._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.export_deployment_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_get_terraform_version_rest_flattened(): +def test_export_revision_statefile_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them 
on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.export_revision_statefile + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.export_revision_statefile + ] = mock_rpc + + request = {} + client.export_revision_statefile(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.export_revision_statefile(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_export_revision_statefile_rest_required_fields( + request_type=config.ExportRevisionStatefileRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_revision_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + 
).export_revision_statefile._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) + request = request_type(**request_init) + # Designate an appropriate value for the returned response. + return_value = config.Statefile() # Mock the http request call within the method and fake a response. - with mock.patch.object(type(client.transport._session), "request") as req: - # Designate an appropriate value for the returned response. - return_value = config.TerraformVersion() + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result - # get arguments that satisfy an http rule for this method - sample_request = { - "name": "projects/sample1/locations/sample2/terraformVersions/sample3" - } + response_value = Response() + response_value.status_code = 200 - # get truthy value for each flattened field - mock_args = dict( - name="name_value", - ) - mock_args.update(sample_request) + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Wrap the value into a proper Response obj - response_value = Response() - response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.TerraformVersion.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) - response_value._content = json_return_value.encode("UTF-8") - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_terraform_version(**mock_args) + response = client.export_revision_statefile(request) - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(req.mock_calls) == 1 - _, args, _ = req.mock_calls[0] - assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/terraformVersions/*}" - % client.transport._host, - args[1], - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -def test_get_terraform_version_rest_flattened_error(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, +def test_export_revision_statefile_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_terraform_version( - config.GetTerraformVersionRequest(), - name="name_value", - ) + unset_fields = transport.export_revision_statefile._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) -def test_list_resource_changes_rest_use_cached_wrapped_rpc(): +def test_import_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -16964,40 +18214,38 @@ def test_list_resource_changes_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_resource_changes - in client._transport._wrapped_methods - ) + assert client._transport.import_statefile in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_resource_changes] = ( + client._transport._wrapped_methods[client._transport.import_statefile] = ( mock_rpc ) request = {} - client.list_resource_changes(request) + client.import_statefile(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resource_changes(request) + client.import_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resource_changes_rest_required_fields( - request_type=config.ListResourceChangesRequest, +def test_import_statefile_rest_required_fields( + request_type=config.ImportStatefileRequest, ): transport_class = transports.ConfigRestTransport request_init = {} request_init["parent"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17008,30 +18256,24 @@ def test_list_resource_changes_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_changes._get_unset_required_fields(jsonified_request) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["parent"] = "parent_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_changes._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).import_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "parent" in jsonified_request assert jsonified_request["parent"] == "parent_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17040,7 +18282,7 @@ def test_list_resource_changes_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourceChangesResponse() + return_value = config.Statefile() # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17052,49 +18294,48 @@ def test_list_resource_changes_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceChangesResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_changes(request) + response = client.import_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_resource_changes_rest_unset_required_fields(): +def test_import_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resource_changes._get_unset_required_fields({}) + unset_fields = transport.import_statefile._get_unset_required_fields({}) assert set(unset_fields) == ( - set( + set(()) + & set( ( - "filter", - "orderBy", - "pageSize", - "pageToken", + "parent", + "lockId", ) ) - & set(("parent",)) ) -def test_list_resource_changes_rest_flattened(): +def test_import_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17103,16 +18344,17 @@ def test_list_resource_changes_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourceChangesResponse() + return_value = config.Statefile() # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" + "parent": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( parent="parent_value", + lock_id=725, ) mock_args.update(sample_request) @@ -17120,26 +18362,26 @@ def test_list_resource_changes_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListResourceChangesResponse.pb(return_value) + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_changes(**mock_args) + 
client.import_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceChanges" + "%s/v1/{parent=projects/*/locations/*/deployments/*}:importState" % client.transport._host, args[1], ) -def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): +def test_import_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17148,78 +18390,14 @@ def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.list_resource_changes( - config.ListResourceChangesRequest(), + client.import_statefile( + config.ImportStatefileRequest(), parent="parent_value", + lock_id=725, ) -def test_list_resource_changes_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. 
- # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), - config.ResourceChange(), - config.ResourceChange(), - ], - next_page_token="abc", - ), - config.ListResourceChangesResponse( - resource_changes=[], - next_page_token="def", - ), - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), - ], - next_page_token="ghi", - ), - config.ListResourceChangesResponse( - resource_changes=[ - config.ResourceChange(), - config.ResourceChange(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple( - config.ListResourceChangesResponse.to_json(x) for x in response - ) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" - } - - pager = client.list_resource_changes(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.ResourceChange) for i in results) - - pages = list(client.list_resource_changes(request=sample_request).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_resource_change_rest_use_cached_wrapped_rpc(): +def test_delete_statefile_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17233,39 +18411,38 @@ def test_get_resource_change_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been 
cached - assert ( - client._transport.get_resource_change in client._transport._wrapped_methods - ) + assert client._transport.delete_statefile in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_resource_change] = ( + client._transport._wrapped_methods[client._transport.delete_statefile] = ( mock_rpc ) request = {} - client.get_resource_change(request) + client.delete_statefile(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_resource_change(request) + client.delete_statefile(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_change_rest_required_fields( - request_type=config.GetResourceChangeRequest, +def test_delete_statefile_rest_required_fields( + request_type=config.DeleteStatefileRequest, ): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17276,21 +18453,24 @@ def test_get_resource_change_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_change._get_unset_required_fields(jsonified_request) + ).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_change._get_unset_required_fields(jsonified_request) + 
).delete_statefile._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17299,7 +18479,7 @@ def test_get_resource_change_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ResourceChange() + return_value = None # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17311,39 +18491,45 @@ def test_get_resource_change_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ResourceChange.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_change(request) + response = client.delete_statefile(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_change_rest_unset_required_fields(): +def test_delete_statefile_rest_unset_required_fields(): transport = transports.ConfigRestTransport( 
credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource_change._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.delete_statefile._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) -def test_get_resource_change_rest_flattened(): +def test_delete_statefile_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17352,11 +18538,11 @@ def test_get_resource_change_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ResourceChange() + return_value = None # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field @@ -17368,27 +18554,25 @@ def test_get_resource_change_rest_flattened(): # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ResourceChange.pb(return_value) - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_change(**mock_args) + client.delete_statefile(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*/resourceChanges/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*}:deleteState" % client.transport._host, args[1], ) -def test_get_resource_change_rest_flattened_error(transport: str = "rest"): +def test_delete_statefile_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17397,13 +18581,13 @@ def test_get_resource_change_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_resource_change( - config.GetResourceChangeRequest(), + client.delete_statefile( + config.DeleteStatefileRequest(), name="name_value", ) -def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): +def test_lock_deployment_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17417,39 +18601,39 @@ def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.list_resource_drifts in client._transport._wrapped_methods - ) + assert client._transport.lock_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[client._transport.list_resource_drifts] = ( - mock_rpc - ) + client._transport._wrapped_methods[client._transport.lock_deployment] = mock_rpc request = {} - client.list_resource_drifts(request) + client.lock_deployment(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.list_resource_drifts(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.lock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_list_resource_drifts_rest_required_fields( - request_type=config.ListResourceDriftsRequest, +def test_lock_deployment_rest_required_fields( + request_type=config.LockDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} - request_init["parent"] = "" + request_init["name"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17460,30 +18644,21 @@ def test_list_resource_drifts_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_drifts._get_unset_required_fields(jsonified_request) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present - jsonified_request["parent"] = "parent_value" + jsonified_request["name"] = "name_value" unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).list_resource_drifts._get_unset_required_fields(jsonified_request) - # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set( - ( - "filter", - "order_by", - "page_size", - "page_token", - ) - ) + ).lock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone - assert "parent" in jsonified_request - assert jsonified_request["parent"] == "parent_value" + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17492,7 +18667,7 @@ def test_list_resource_drifts_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.ListResourceDriftsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17504,49 +18679,37 @@ def test_list_resource_drifts_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_drifts(request) + response = client.lock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def 
test_list_resource_drifts_rest_unset_required_fields(): +def test_lock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.list_resource_drifts._get_unset_required_fields({}) - assert set(unset_fields) == ( - set( - ( - "filter", - "orderBy", - "pageSize", - "pageToken", - ) - ) - & set(("parent",)) - ) + unset_fields = transport.lock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -def test_list_resource_drifts_rest_flattened(): +def test_lock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17555,43 +18718,41 @@ def test_list_resource_drifts_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListResourceDriftsResponse() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( - parent="parent_value", + name="name_value", ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_drifts(**mock_args) + client.lock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceDrifts" + "%s/v1/{name=projects/*/locations/*/deployments/*}:lock" % client.transport._host, args[1], ) -def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): +def test_lock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17600,122 +18761,62 @@ def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.list_resource_drifts( - config.ListResourceDriftsRequest(), - parent="parent_value", + client.lock_deployment( + config.LockDeploymentRequest(), + name="name_value", ) -def test_list_resource_drifts_rest_pager(transport: str = "rest"): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Mock the http request call within the method and fake a response. - with mock.patch.object(Session, "request") as req: - # TODO(kbandes): remove this mock unless there's a good reason for it. - # with mock.patch.object(path_template, 'transcode') as transcode: - # Set the response as a series of pages - response = ( - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - config.ResourceDrift(), - config.ResourceDrift(), - ], - next_page_token="abc", - ), - config.ListResourceDriftsResponse( - resource_drifts=[], - next_page_token="def", - ), - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - ], - next_page_token="ghi", - ), - config.ListResourceDriftsResponse( - resource_drifts=[ - config.ResourceDrift(), - config.ResourceDrift(), - ], - ), - ) - # Two responses for two calls - response = response + response - - # Wrap the values into proper Response objs - response = tuple(config.ListResourceDriftsResponse.to_json(x) for x in response) - return_values = tuple(Response() for i in response) - for return_val, response_val in zip(return_values, response): - return_val._content = response_val.encode("UTF-8") - return_val.status_code = 200 - req.side_effect = return_values - - sample_request = { - "parent": "projects/sample1/locations/sample2/previews/sample3" - } - - pager = client.list_resource_drifts(request=sample_request) - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, config.ResourceDrift) for i in results) - - pages = list(client.list_resource_drifts(request=sample_request).pages) - for 
page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -def test_get_resource_drift_rest_use_cached_wrapped_rpc(): - # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, - # instead of constructing them on each call - with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="rest", - ) +def test_unlock_deployment_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) # Should wrap all calls on client creation assert wrapper_fn.call_count > 0 wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_resource_drift in client._transport._wrapped_methods - ) + assert client._transport.unlock_deployment in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[client._transport.get_resource_drift] = ( + client._transport._wrapped_methods[client._transport.unlock_deployment] = ( mock_rpc ) request = {} - client.get_resource_drift(request) + client.unlock_deployment(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 - client.get_resource_drift(request) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.unlock_deployment(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_resource_drift_rest_required_fields( - request_type=config.GetResourceDriftRequest, +def test_unlock_deployment_rest_required_fields( + request_type=config.UnlockDeploymentRequest, ): transport_class = transports.ConfigRestTransport request_init = {} request_init["name"] = "" + request_init["lock_id"] = 0 request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -17726,21 +18827,24 @@ def test_get_resource_drift_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_drift._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present jsonified_request["name"] = "name_value" + jsonified_request["lockId"] = 725 unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_resource_drift._get_unset_required_fields(jsonified_request) + ).unlock_deployment._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone assert "name" in jsonified_request assert jsonified_request["name"] == "name_value" + assert "lockId" in jsonified_request + assert jsonified_request["lockId"] == 725 client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -17749,7 +18853,7 @@ def test_get_resource_drift_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the 
returned response. - return_value = config.ResourceDrift() + return_value = operations_pb2.Operation(name="operations/spam") # Mock the http request call within the method and fake a response. with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17761,39 +18865,45 @@ def test_get_resource_drift_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "get", + "method": "post", "query_params": pb_request, } + transcode_result["body"] = pb_request transcode.return_value = transcode_result response_value = Response() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_drift(request) + response = client.unlock_deployment(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_resource_drift_rest_unset_required_fields(): +def test_unlock_deployment_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_resource_drift._get_unset_required_fields({}) - assert set(unset_fields) == (set(()) & set(("name",))) + unset_fields = transport.unlock_deployment._get_unset_required_fields({}) + assert set(unset_fields) == ( + set(()) + & set( + ( + "name", + "lockId", + ) + ) + ) -def test_get_resource_drift_rest_flattened(): +def test_unlock_deployment_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17802,43 +18912,42 @@ def 
test_get_resource_drift_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ResourceDrift() + return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field mock_args = dict( name="name_value", + lock_id=725, ) mock_args.update(sample_request) # Wrap the value into a proper Response obj response_value = Response() response_value.status_code = 200 - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_drift(**mock_args) + client.unlock_deployment(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/previews/*/resourceDrifts/*}" + "%s/v1/{name=projects/*/locations/*/deployments/*}:unlock" % client.transport._host, args[1], ) -def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): +def test_unlock_deployment_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -17847,13 +18956,14 @@ def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.get_resource_drift( - config.GetResourceDriftRequest(), + client.unlock_deployment( + config.UnlockDeploymentRequest(), name="name_value", + lock_id=725, ) -def test_get_auto_migration_config_rest_use_cached_wrapped_rpc(): +def test_export_lock_info_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -17867,35 +18977,32 @@ def test_get_auto_migration_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.get_auto_migration_config - in client._transport._wrapped_methods - ) + assert client._transport.export_lock_info in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. 
) - client._transport._wrapped_methods[ - client._transport.get_auto_migration_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.export_lock_info] = ( + mock_rpc + ) request = {} - client.get_auto_migration_config(request) + client.export_lock_info(request) # Establish that the underlying gRPC stub method was called. assert mock_rpc.call_count == 1 - client.get_auto_migration_config(request) + client.export_lock_info(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_get_auto_migration_config_rest_required_fields( - request_type=config.GetAutoMigrationConfigRequest, +def test_export_lock_info_rest_required_fields( + request_type=config.ExportLockInfoRequest, ): transport_class = transports.ConfigRestTransport @@ -17911,7 +19018,7 @@ def test_get_auto_migration_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_auto_migration_config._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present @@ -17920,7 +19027,7 @@ def test_get_auto_migration_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).get_auto_migration_config._get_unset_required_fields(jsonified_request) + ).export_lock_info._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone @@ -17934,7 +19041,7 @@ def test_get_auto_migration_config_rest_required_fields( request = request_type(**request_init) # Designate an appropriate value for the returned response. - return_value = config.AutoMigrationConfig() + return_value = config.LockInfo() # Mock the http request call within the method and fake a response. 
with mock.patch.object(Session, "request") as req: # We need to mock transcode() because providing default values @@ -17955,30 +19062,30 @@ def test_get_auto_migration_config_rest_required_fields( response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.AutoMigrationConfig.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_auto_migration_config(request) + response = client.export_lock_info(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_get_auto_migration_config_rest_unset_required_fields(): +def test_export_lock_info_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.get_auto_migration_config._get_unset_required_fields({}) + unset_fields = transport.export_lock_info._get_unset_required_fields({}) assert set(unset_fields) == (set(()) & set(("name",))) -def test_get_auto_migration_config_rest_flattened(): +def test_export_lock_info_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -17987,11 +19094,11 @@ def test_get_auto_migration_config_rest_flattened(): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.AutoMigrationConfig() + return_value = config.LockInfo() # get arguments that satisfy an http rule for this method sample_request = { - "name": "projects/sample1/locations/sample2/autoMigrationConfig" + "name": "projects/sample1/locations/sample2/deployments/sample3" } # get truthy value for each flattened field @@ -18004,26 +19111,26 @@ def test_get_auto_migration_config_rest_flattened(): response_value = Response() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.AutoMigrationConfig.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_auto_migration_config(**mock_args) + client.export_lock_info(**mock_args) # Establish that the underlying call was made with the expected # request object values. assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{name=projects/*/locations/*/autoMigrationConfig}" + "%s/v1/{name=projects/*/locations/*/deployments/*}:exportLock" % client.transport._host, args[1], ) -def test_get_auto_migration_config_rest_flattened_error(transport: str = "rest"): +def test_export_lock_info_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18032,13 +19139,13 @@ def test_get_auto_migration_config_rest_flattened_error(transport: str = "rest") # Attempting to call a method with both a request object and flattened # fields is an error. 
with pytest.raises(ValueError): - client.get_auto_migration_config( - config.GetAutoMigrationConfigRequest(), + client.export_lock_info( + config.ExportLockInfoRequest(), name="name_value", ) -def test_update_auto_migration_config_rest_use_cached_wrapped_rpc(): +def test_create_preview_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: @@ -18052,22 +19159,17 @@ def test_update_auto_migration_config_rest_use_cached_wrapped_rpc(): wrapper_fn.reset_mock() # Ensure method has been cached - assert ( - client._transport.update_auto_migration_config - in client._transport._wrapped_methods - ) + assert client._transport.create_preview in client._transport._wrapped_methods # Replace cached wrapped function with mock mock_rpc = mock.Mock() mock_rpc.return_value.name = ( "foo" # operation_request.operation in compute client(s) expect a string. ) - client._transport._wrapped_methods[ - client._transport.update_auto_migration_config - ] = mock_rpc + client._transport._wrapped_methods[client._transport.create_preview] = mock_rpc request = {} - client.update_auto_migration_config(request) + client.create_preview(request) # Establish that the underlying gRPC stub method was called. 
assert mock_rpc.call_count == 1 @@ -18076,19 +19178,18 @@ def test_update_auto_migration_config_rest_use_cached_wrapped_rpc(): # subsequent calls should use the cached wrapper wrapper_fn.reset_mock() - client.update_auto_migration_config(request) + client.create_preview(request) # Establish that a new wrapper was not created for this call assert wrapper_fn.call_count == 0 assert mock_rpc.call_count == 2 -def test_update_auto_migration_config_rest_required_fields( - request_type=config.UpdateAutoMigrationConfigRequest, -): +def test_create_preview_rest_required_fields(request_type=config.CreatePreviewRequest): transport_class = transports.ConfigRestTransport request_init = {} + request_init["parent"] = "" request = request_type(**request_init) pb_request = request_type.pb(request) jsonified_request = json.loads( @@ -18099,19 +19200,28 @@ def test_update_auto_migration_config_rest_required_fields( unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_auto_migration_config._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) jsonified_request.update(unset_fields) # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ).update_auto_migration_config._get_unset_required_fields(jsonified_request) + ).create_preview._get_unset_required_fields(jsonified_request) # Check that path parameters and body parameters are not mixing in. 
- assert not set(unset_fields) - set(("update_mask",)) + assert not set(unset_fields) - set( + ( + "preview_id", + "request_id", + ) + ) jsonified_request.update(unset_fields) # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), @@ -18132,7 +19242,7 @@ def test_update_auto_migration_config_rest_required_fields( pb_request = request_type.pb(request) transcode_result = { "uri": "v1/sample_method", - "method": "patch", + "method": "post", "query_params": pb_request, } transcode_result["body"] = pb_request @@ -18146,23 +19256,36 @@ def test_update_auto_migration_config_rest_required_fields( req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_auto_migration_config(request) + response = client.create_preview(request) expected_params = [("$alt", "json;enum-encoding=int")] actual_params = req.call_args.kwargs["params"] assert expected_params == actual_params -def test_update_auto_migration_config_rest_unset_required_fields(): +def test_create_preview_rest_unset_required_fields(): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials ) - unset_fields = transport.update_auto_migration_config._get_unset_required_fields({}) - assert set(unset_fields) == (set(("updateMask",)) & set(("autoMigrationConfig",))) + unset_fields = transport.create_preview._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "previewId", + "requestId", + ) + ) + & set( + ( + "parent", + "preview", + ) + ) + ) -def test_update_auto_migration_config_rest_flattened(): +def test_create_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -18174,41 +19297,40 @@ def test_update_auto_migration_config_rest_flattened(): 
return_value = operations_pb2.Operation(name="operations/spam") # get arguments that satisfy an http rule for this method - sample_request = { - "auto_migration_config": { - "name": "projects/sample1/locations/sample2/autoMigrationConfig" - } - } + sample_request = {"parent": "projects/sample1/locations/sample2"} # get truthy value for each flattened field mock_args = dict( - auto_migration_config=config.AutoMigrationConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), - ) - mock_args.update(sample_request) - - # Wrap the value into a proper Response obj - response_value = Response() + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() response_value.status_code = 200 json_return_value = json_format.MessageToJson(return_value) response_value._content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_auto_migration_config(**mock_args) + client.create_preview(**mock_args) # Establish that the underlying call was made with the expected # request object values. 
assert len(req.mock_calls) == 1 _, args, _ = req.mock_calls[0] assert path_template.validate( - "%s/v1/{auto_migration_config.name=projects/*/locations/*/autoMigrationConfig}" - % client.transport._host, + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, args[1], ) -def test_update_auto_migration_config_rest_flattened_error(transport: str = "rest"): +def test_create_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, @@ -18217,1633 +19339,7633 @@ def test_update_auto_migration_config_rest_flattened_error(transport: str = "res # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): - client.update_auto_migration_config( - config.UpdateAutoMigrationConfigRequest(), - auto_migration_config=config.AutoMigrationConfig(name="name_value"), - update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + client.create_preview( + config.CreatePreviewRequest(), + parent="parent_value", + preview=config.Preview( + terraform_blueprint=config.TerraformBlueprint( + gcs_source="gcs_source_value" + ) + ), ) -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): +def test_get_preview_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. 
- transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, + transport="rest", ) - # It is an error to provide an api_key and a transport instance. - transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigClient( - client_options=options, - transport=transport, - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # It is an error to provide an api_key and a credential. - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = ConfigClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) + # Ensure method has been cached + assert client._transport.get_preview in client._transport._wrapped_methods - # It is an error to provide scopes and a transport instance. - transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = ConfigClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. ) + client._transport._wrapped_methods[client._transport.get_preview] = mock_rpc + request = {} + client.get_preview(request) -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = ConfigClient(transport=transport) - assert client.transport is transport + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_preview(request) -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.ConfigGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - transport = transports.ConfigGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel +def test_get_preview_rest_required_fields(request_type=config.GetPreviewRequest): + transport_class = transports.ConfigRestTransport -@pytest.mark.parametrize( - "transport_class", - [ - transports.ConfigGrpcTransport, - transports.ConfigGrpcAsyncIOTransport, - transports.ConfigRestTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. 
- with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -def test_transport_kind_grpc(): - transport = ConfigClient.get_transport_class("grpc")( + unset_fields = transport_class( credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "grpc" + ).get_preview._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with default values are now present -def test_initialize_client_w_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc" - ) - assert client is not None + jsonified_request["name"] = "name_value" + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_preview._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_deployments_empty_call_grpc(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - call.return_value = config.ListDeploymentsResponse() - client.list_deployments(request=None) + # Designate an appropriate value for the returned response. + return_value = config.Preview() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListDeploymentsRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_deployment_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.get_preview(request) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - call.return_value = config.Deployment() - client.get_deployment(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetDeploymentRequest() - assert args[0] == request_msg +def test_get_preview_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + unset_fields = transport.get_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_deployment_empty_call_grpc(): + +def test_get_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_deployment(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Preview() - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.CreateDeploymentRequest() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.Preview.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_update_deployment_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) - - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_deployment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UpdateDeploymentRequest() + client.get_preview(**mock_args) - assert args[0] == request_msg + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. 
request == None and no flattened fields passed, work. -def test_delete_deployment_empty_call_grpc(): +def test_get_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_deployment(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_preview( + config.GetPreviewRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeleteDeploymentRequest() - assert args[0] == request_msg +def test_list_previews_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_revisions_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert client._transport.list_previews in client._transport._wrapped_methods - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: - call.return_value = config.ListRevisionsResponse() - client.list_revisions(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_previews] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListRevisionsRequest() + request = {} + client.list_previews(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_previews(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_revision_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_revision), "__call__") as call: - call.return_value = config.Revision() - client.get_revision(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetRevisionRequest() +def test_list_previews_rest_required_fields(request_type=config.ListPreviewsRequest): + transport_class = transports.ConfigRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_resource_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_previews._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: - call.return_value = config.Resource() - client.get_resource(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_previews._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_resources_empty_call_grpc(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_resources), "__call__") as call: - call.return_value = config.ListResourcesResponse() - client.list_resources(request=None) + # Designate an appropriate value for the returned response. + return_value = config.ListPreviewsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourcesRequest() + response_value = Response() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_deployment_statefile_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + response = client.list_previews(request) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_deployment_statefile), "__call__" - ) as call: - call.return_value = config.Statefile() - client.export_deployment_statefile(request=None) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportDeploymentStatefileRequest() - assert args[0] == request_msg +def test_list_previews_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_previews._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_export_revision_statefile_empty_call_grpc(): +def test_list_previews_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_revision_statefile), "__call__" - ) as call: - call.return_value = config.Statefile() - client.export_revision_statefile(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListPreviewsResponse() - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportRevisionStatefileRequest() + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListPreviewsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_import_statefile_empty_call_grpc(): + client.list_previews(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/previews" % client.transport._host, + args[1], + ) + + +def test_list_previews_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: - call.return_value = config.Statefile() - client.import_statefile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ImportStatefileRequest() - - assert args[0] == request_msg + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_previews( + config.ListPreviewsRequest(), + parent="parent_value", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_statefile_empty_call_grpc(): +def test_list_previews_rest_pager(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: - call.return_value = None - client.delete_statefile(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeleteStatefileRequest() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + config.Preview(), + ], + next_page_token="abc", + ), + config.ListPreviewsResponse( + previews=[], + next_page_token="def", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + ], + next_page_token="ghi", + ), + config.ListPreviewsResponse( + previews=[ + config.Preview(), + config.Preview(), + ], + ), + ) + # Two responses for two calls + response = response + response - assert args[0] == request_msg + # Wrap the values into proper Response objs + response = tuple(config.ListPreviewsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + sample_request = {"parent": "projects/sample1/locations/sample2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_lock_deployment_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + pager = client.list_previews(request=sample_request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.lock_deployment(request=None) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.Preview) for i in results) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.LockDeploymentRequest() + pages = list(client.list_previews(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - assert args[0] == request_msg +def test_delete_preview_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_unlock_deployment_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.unlock_deployment), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.unlock_deployment(request=None) + # Ensure method has been cached + assert client._transport.delete_preview in client._transport._wrapped_methods - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UnlockDeploymentRequest() + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.delete_preview] = mock_rpc - assert args[0] == request_msg + request = {} + client.delete_preview(request) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_lock_info_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: - call.return_value = config.LockInfo() - client.export_lock_info(request=None) + client.delete_preview(request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportLockInfoRequest() + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - assert args[0] == request_msg +def test_delete_preview_rest_required_fields(request_type=config.DeletePreviewRequest): + transport_class = transports.ConfigRestTransport -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_create_preview_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.create_preview), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.create_preview(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.CreatePreviewRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_preview._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_preview._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("request_id",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_preview_empty_call_grpc(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_preview), "__call__") as call: - call.return_value = config.Preview() - client.get_preview(request=None) + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetPreviewRequest() + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) - assert args[0] == request_msg + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_preview(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_previews_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_previews), "__call__") as call: - call.return_value = config.ListPreviewsResponse() - client.list_previews(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListPreviewsRequest() +def test_delete_preview_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) - assert args[0] == request_msg + unset_fields = transport.delete_preview._get_unset_required_fields({}) + assert set(unset_fields) == (set(("requestId",)) & set(("name",))) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_delete_preview_empty_call_grpc(): +def test_delete_preview_rest_flattened(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.delete_preview(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeletePreviewRequest() + # get arguments that satisfy an http rule for this method + sample_request = {"name": "projects/sample1/locations/sample2/previews/sample3"} - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_preview(**mock_args) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_export_preview_result_empty_call_grpc(): + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/previews/*}" % client.transport._host, + args[1], + ) + + +def test_delete_preview_rest_flattened_error(transport: str = "rest"): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_preview_result), "__call__" - ) as call: - call.return_value = config.ExportPreviewResultResponse() - client.export_preview_result(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_preview( + config.DeletePreviewRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportPreviewResultRequest() - assert args[0] == request_msg +def test_export_preview_result_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_terraform_versions_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.export_preview_result + in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_terraform_versions), "__call__" - ) as call: - call.return_value = config.ListTerraformVersionsResponse() - client.list_terraform_versions(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.export_preview_result] = ( + mock_rpc + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListTerraformVersionsRequest() + request = {} + client.export_preview_result(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + client.export_preview_result(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_terraform_version_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_terraform_version), "__call__" - ) as call: - call.return_value = config.TerraformVersion() - client.get_terraform_version(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetTerraformVersionRequest() +def test_export_preview_result_rest_required_fields( + request_type=config.ExportPreviewResultRequest, +): + transport_class = transports.ConfigRestTransport - assert args[0] == request_msg + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + # verify fields with default values are dropped -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_resource_changes_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_preview_result._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_resource_changes), "__call__" - ) as call: - call.return_value = config.ListResourceChangesResponse() - client.list_resource_changes(request=None) + # verify required fields with default values are now present - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceChangesRequest() + jsonified_request["parent"] = "parent_value" - assert args[0] == request_msg + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).export_preview_result._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_resource_change_empty_call_grpc(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + transport="rest", ) + request = request_type(**request_init) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_change), "__call__" - ) as call: - call.return_value = config.ResourceChange() - client.get_resource_change(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceChangeRequest() - - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = config.ExportPreviewResultResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_list_resource_drifts_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Convert return value to protobuf type + return_value = config.ExportPreviewResultResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_drifts), "__call__" - ) as call: - call.return_value = config.ListResourceDriftsResponse() - client.list_resource_drifts(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceDriftsRequest() + response = client.export_preview_result(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -def test_get_resource_drift_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", +def test_export_preview_result_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_drift), "__call__" - ) as call: - call.return_value = config.ResourceDrift() - client.get_resource_drift(request=None) + unset_fields = transport.export_preview_result._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("parent",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceDriftRequest() - assert args[0] == request_msg +def test_list_terraform_versions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_get_auto_migration_config_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", - ) + # Ensure method has been cached + assert ( + client._transport.list_terraform_versions + in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_auto_migration_config), "__call__" - ) as call: - call.return_value = config.AutoMigrationConfig() - client.get_auto_migration_config(request=None) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_terraform_versions + ] = mock_rpc - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetAutoMigrationConfigRequest() + request = {} + client.list_terraform_versions(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_terraform_versions(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-def test_update_auto_migration_config_empty_call_grpc(): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_terraform_versions_rest_required_fields( + request_type=config.ListTerraformVersionsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_auto_migration_config), "__call__" - ) as call: - call.return_value = operations_pb2.Operation(name="operations/op") - client.update_auto_migration_config(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UpdateAutoMigrationConfigRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_terraform_versions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -def test_transport_kind_grpc_asyncio(): - transport = ConfigAsyncClient.get_transport_class("grpc_asyncio")( - credentials=async_anonymous_credentials() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_terraform_versions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) - assert transport.kind == "grpc_asyncio" + jsonified_request.update(unset_fields) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" -def test_initialize_client_w_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), transport="grpc_asyncio" + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - assert client is not None + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListTerraformVersionsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_deployments_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = config.ListTerraformVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListDeploymentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_deployments(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListDeploymentsRequest() + response = client.list_terraform_versions(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_terraform_versions_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Deployment( - name="name_value", - state=config.Deployment.State.CREATING, - latest_revision="latest_revision_value", - state_detail="state_detail_value", - error_code=config.Deployment.ErrorCode.REVISION_FAILED, - delete_build="delete_build_value", - delete_logs="delete_logs_value", - error_logs="error_logs_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", - lock_state=config.Deployment.LockState.LOCKED, - tf_version_constraint="tf_version_constraint_value", - tf_version="tf_version_value", - quota_validation=config.QuotaValidation.ENABLED, + unset_fields = transport.list_terraform_versions._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - await client.get_deployment(request=None) + & set(("parent",)) + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetDeploymentRequest() - assert args[0] == request_msg +def test_list_terraform_versions_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListTerraformVersionsResponse() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_create_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.create_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", ) - await client.create_deployment(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.CreateDeploymentRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListTerraformVersionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.list_terraform_versions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/terraformVersions" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_update_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_terraform_versions_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_terraform_versions( + config.ListTerraformVersionsRequest(), + parent="parent_value", ) - await client.update_deployment(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UpdateDeploymentRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_terraform_versions_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.delete_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + config.TerraformVersion(), + config.TerraformVersion(), + ], + next_page_token="abc", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[], + next_page_token="def", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + ], + next_page_token="ghi", + ), + config.ListTerraformVersionsResponse( + terraform_versions=[ + config.TerraformVersion(), + config.TerraformVersion(), + ], + ), ) - await client.delete_deployment(request=None) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeleteDeploymentRequest() + # Wrap the values into proper Response objs + response = tuple( + config.ListTerraformVersionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = {"parent": "projects/sample1/locations/sample2"} + pager = client.list_terraform_versions(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_revisions_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.TerraformVersion) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + pages = list(client.list_terraform_versions(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_terraform_version_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - await client.list_revisions(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListRevisionsRequest() + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() - assert args[0] == request_msg + # Ensure method has been cached + assert ( + client._transport.get_terraform_version + in client._transport._wrapped_methods + ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.get_terraform_version] = ( + mock_rpc + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_revision_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request = {} + client.get_terraform_version(request) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_revision), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Revision( - name="name_value", - action=config.Revision.Action.CREATE, - state=config.Revision.State.APPLYING, - state_detail="state_detail_value", - error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - logs="logs_value", - error_logs="error_logs_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", - tf_version_constraint="tf_version_constraint_value", - tf_version="tf_version_value", - quota_validation_results="quota_validation_results_value", - quota_validation=config.QuotaValidation.ENABLED, - ) - ) - await client.get_revision(request=None) + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetRevisionRequest() + client.get_terraform_version(request) - assert args[0] == request_msg + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_resource_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_terraform_version_rest_required_fields( + request_type=config.GetTerraformVersionRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_resource), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Resource( - name="name_value", - intent=config.Resource.Intent.CREATE, - state=config.Resource.State.PLANNED, - ) - ) - await client.get_resource(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terraform_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["name"] = "name_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_resources_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_terraform_version._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_resources), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListResourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_resources(request=None) + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourcesRequest() + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = config.TerraformVersion() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_deployment_statefile_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = config.TerraformVersion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_deployment_statefile), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Statefile( - signed_uri="signed_uri_value", - ) - ) - await client.export_deployment_statefile(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportDeploymentStatefileRequest() + response = client.get_terraform_version(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_export_revision_statefile_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_get_terraform_version_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_revision_statefile), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Statefile( - signed_uri="signed_uri_value", - ) - ) - await client.export_revision_statefile(request=None) + unset_fields = transport.get_terraform_version._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportRevisionStatefileRequest() - assert args[0] == request_msg +def test_get_terraform_version_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.TerraformVersion() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_import_statefile_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + } - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Statefile( - signed_uri="signed_uri_value", - ) + # get truthy value for each flattened field + mock_args = dict( + name="name_value", ) - await client.import_statefile(request=None) + mock_args.update(sample_request) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ImportStatefileRequest() + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.TerraformVersion.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - assert args[0] == request_msg + client.get_terraform_version(**mock_args) + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/terraformVersions/*}" + % client.transport._host, + args[1], + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_delete_statefile_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +def test_get_terraform_version_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_statefile(request=None) + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_terraform_version( + config.GetTerraformVersionRequest(), + name="name_value", + ) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeleteStatefileRequest() - assert args[0] == request_msg +def test_list_resource_changes_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_lock_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert ( + client._transport.list_resource_changes + in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_resource_changes] = ( + mock_rpc ) - await client.lock_deployment(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.LockDeploymentRequest() + request = {} + client.list_resource_changes(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.list_resource_changes(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_unlock_deployment_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_resource_changes_rest_required_fields( + request_type=config.ListResourceChangesRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.unlock_deployment), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.unlock_deployment(request=None) + # verify fields with default values are dropped - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UnlockDeploymentRequest() + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_changes._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) - assert args[0] == request_msg + # verify required fields with default values are now present + jsonified_request["parent"] = "parent_value" -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_export_lock_info_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_changes._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) ) + jsonified_request.update(unset_fields) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.LockInfo( - lock_id=725, - operation="operation_value", - info="info_value", - who="who_value", - version="version_value", - ) - ) - await client.export_lock_info(request=None) + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportLockInfoRequest() + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) - assert args[0] == request_msg + # Designate an appropriate value for the returned response. + return_value = config.ListResourceChangesResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + response_value = Response() + response_value.status_code = 200 -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_create_preview_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Convert return value to protobuf type + return_value = config.ListResourceChangesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_preview), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.create_preview(request=None) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.CreatePreviewRequest() + response = client.list_resource_changes(request) - assert args[0] == request_msg + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_preview_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_resource_changes_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.get_preview), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.Preview( - name="name_value", - state=config.Preview.State.CREATING, - deployment="deployment_value", - preview_mode=config.Preview.PreviewMode.DEFAULT, - service_account="service_account_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", - worker_pool="worker_pool_value", - error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - error_logs="error_logs_value", - logs="logs_value", - tf_version="tf_version_value", - tf_version_constraint="tf_version_constraint_value", + unset_fields = transport.list_resource_changes._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", ) ) - await client.get_preview(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetPreviewRequest() - - assert args[0] == request_msg + & set(("parent",)) + ) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_list_previews_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_resource_changes_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", ) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_previews), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListPreviewsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) - ) - await client.list_previews(request=None) + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListResourceChangesResponse() - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListPreviewsRequest() + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } - assert args[0] == request_msg + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourceChangesResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_delete_preview_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + client.list_resource_changes(**mock_args) - # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceChanges" + % client.transport._host, + args[1], ) - await client.delete_preview(request=None) - - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.DeletePreviewRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_export_preview_result_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_resource_changes_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_preview_result), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ExportPreviewResultResponse() + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_resource_changes( + config.ListResourceChangesRequest(), + parent="parent_value", ) - await client.export_preview_result(request=None) - - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ExportPreviewResultRequest() - - assert args[0] == request_msg -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_terraform_versions_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", +def test_list_resource_changes_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) - # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.list_terraform_versions), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListTerraformVersionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), + config.ResourceChange(), + config.ResourceChange(), + ], + next_page_token="abc", + ), + config.ListResourceChangesResponse( + resource_changes=[], + next_page_token="def", + ), + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), + ], + next_page_token="ghi", + ), + config.ListResourceChangesResponse( + resource_changes=[ + config.ResourceChange(), + config.ResourceChange(), + ], + ), ) - await client.list_terraform_versions(request=None) + # Two responses for two calls + response = response + response - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListTerraformVersionsRequest() + # Wrap the values into proper Response objs + response = tuple( + config.ListResourceChangesResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values - assert args[0] == request_msg + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } + pager = client.list_resource_changes(request=sample_request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_terraform_version_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.ResourceChange) for i in results) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_terraform_version), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.TerraformVersion( - name="name_value", - state=config.TerraformVersion.State.ACTIVE, - ) - ) - await client.get_terraform_version(request=None) + pages = list(client.list_resource_changes(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetTerraformVersionRequest() - assert args[0] == request_msg +def test_get_resource_change_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_resource_changes_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + # Ensure method has been cached + assert ( + client._transport.get_resource_change in client._transport._wrapped_methods + ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_changes), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListResourceChangesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_resource_change] = ( + mock_rpc ) - await client.list_resource_changes(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceChangesRequest() + request = {} + client.get_resource_change(request) - assert args[0] == request_msg + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + client.get_resource_change(request) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_get_resource_change_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_resource_change_rest_required_fields( + request_type=config.GetResourceChangeRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource_change._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource_change._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an 
appropriate value for the returned response. + return_value = config.ResourceChange() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ResourceChange.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_resource_change(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_resource_change_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_resource_change._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_resource_change_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ResourceChange() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ResourceChange.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_resource_change(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/previews/*/resourceChanges/*}" + % client.transport._host, + args[1], + ) + + +def test_get_resource_change_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_resource_change( + config.GetResourceChangeRequest(), + name="name_value", + ) + + +def test_list_resource_drifts_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_resource_drifts in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_resource_drifts] = ( + mock_rpc + ) + + request = {} + client.list_resource_drifts(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_resource_drifts(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_resource_drifts_rest_required_fields( + request_type=config.ListResourceDriftsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_drifts._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_resource_drifts._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListResourceDriftsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListResourceDriftsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_resource_drifts(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_resource_drifts_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_resource_drifts._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_resource_drifts_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.ListResourceDriftsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListResourceDriftsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_resource_drifts(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/previews/*}/resourceDrifts" + % client.transport._host, + args[1], + ) + + +def test_list_resource_drifts_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_resource_drifts( + config.ListResourceDriftsRequest(), + parent="parent_value", + ) + + +def test_list_resource_drifts_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), + config.ResourceDrift(), + config.ResourceDrift(), + ], + next_page_token="abc", + ), + config.ListResourceDriftsResponse( + resource_drifts=[], + next_page_token="def", + ), + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), + ], + next_page_token="ghi", + ), + config.ListResourceDriftsResponse( + resource_drifts=[ + config.ResourceDrift(), + config.ResourceDrift(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple(config.ListResourceDriftsResponse.to_json(x) for x in response) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/previews/sample3" + } + + pager = client.list_resource_drifts(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.ResourceDrift) for i in results) + + pages = list(client.list_resource_drifts(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_get_resource_drift_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + 
+ # Ensure method has been cached + assert ( + client._transport.get_resource_drift in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_resource_drift] = ( + mock_rpc + ) + + request = {} + client.get_resource_drift(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.get_resource_drift(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_resource_drift_rest_required_fields( + request_type=config.GetResourceDriftRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource_drift._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_resource_drift._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # 
Designate an appropriate value for the returned response. + return_value = config.ResourceDrift() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ResourceDrift.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_resource_drift(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_resource_drift_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_resource_drift._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_resource_drift_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ResourceDrift() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ResourceDrift.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_resource_drift(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/previews/*/resourceDrifts/*}" + % client.transport._host, + args[1], + ) + + +def test_get_resource_drift_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_resource_drift( + config.GetResourceDriftRequest(), + name="name_value", + ) + + +def test_get_auto_migration_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_auto_migration_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_auto_migration_config + ] = mock_rpc + + request = {} + client.get_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_auto_migration_config_rest_required_fields( + request_type=config.GetAutoMigrationConfigRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_auto_migration_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_auto_migration_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.AutoMigrationConfig() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.AutoMigrationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_auto_migration_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_auto_migration_config_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_auto_migration_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_auto_migration_config_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.AutoMigrationConfig() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.AutoMigrationConfig.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_auto_migration_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/autoMigrationConfig}" + % client.transport._host, + args[1], + ) + + +def test_get_auto_migration_config_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_auto_migration_config( + config.GetAutoMigrationConfigRequest(), + name="name_value", + ) + + +def test_update_auto_migration_config_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_auto_migration_config + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_auto_migration_config + ] = mock_rpc + + request = {} + client.update_auto_migration_config(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_auto_migration_config(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_auto_migration_config_rest_required_fields( + request_type=config.UpdateAutoMigrationConfigRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_auto_migration_config._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_auto_migration_config._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("update_mask",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_auto_migration_config(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_auto_migration_config_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_auto_migration_config._get_unset_required_fields({}) + assert set(unset_fields) == (set(("updateMask",)) & set(("autoMigrationConfig",))) + + +def test_update_auto_migration_config_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } + } + + # get truthy value for each flattened field + mock_args = dict( + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_auto_migration_config(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{auto_migration_config.name=projects/*/locations/*/autoMigrationConfig}" + % client.transport._host, + args[1], + ) + + +def test_update_auto_migration_config_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_auto_migration_config( + config.UpdateAutoMigrationConfigRequest(), + auto_migration_config=config.AutoMigrationConfig(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_get_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_deployment_group in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_deployment_group] = ( + mock_rpc + ) + + request = {} + client.get_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_deployment_group_rest_required_fields( + request_type=config.GetDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.DeploymentGroup() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.DeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_deployment_group_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.DeploymentGroup() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.DeploymentGroup.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deployment_group( + config.GetDeploymentGroupRequest(), + name="name_value", + ) + + +def test_create_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.create_deployment_group + ] = mock_rpc + + request = {} + client.create_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_deployment_group_rest_required_fields( + request_type=config.CreateDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["deployment_group_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "deploymentGroupId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "deploymentGroupId" in jsonified_request + assert jsonified_request["deploymentGroupId"] == request_init["deployment_group_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["deploymentGroupId"] = "deployment_group_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "deployment_group_id", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "deploymentGroupId" in jsonified_request + assert jsonified_request["deploymentGroupId"] == "deployment_group_id_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_deployment_group(request) + + expected_params = [ + ( + "deploymentGroupId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentGroupId", + "requestId", + ) + ) + & set( + ( + "parent", + "deploymentGroupId", + "deploymentGroup", + ) + ) + ) + + +def test_create_deployment_group_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_create_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_deployment_group( + config.CreateDeploymentGroupRequest(), + parent="parent_value", + deployment_group=config.DeploymentGroup(name="name_value"), + deployment_group_id="deployment_group_id_value", + ) + + +def test_update_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.update_deployment_group + ] = mock_rpc + + request = {} + client.update_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_deployment_group_rest_required_fields( + request_type=config.UpdateDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "request_id", + "update_mask", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "requestId", + "updateMask", + ) + ) + & set(("deploymentGroup",)) + ) + + +def test_update_deployment_group_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "deployment_group": { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + } + + # get truthy value for each flattened field + mock_args = dict( + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.update_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{deployment_group.name=projects/*/locations/*/deploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_update_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.update_deployment_group( + config.UpdateDeploymentGroupRequest(), + deployment_group=config.DeploymentGroup(name="name_value"), + update_mask=field_mask_pb2.FieldMask(paths=["paths_value"]), + ) + + +def test_delete_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.delete_deployment_group + ] = mock_rpc + + request = {} + client.delete_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_deployment_group_rest_required_fields( + request_type=config.DeleteDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_deployment_group._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "deployment_reference_policy", + "force", + "request_id", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "deploymentReferencePolicy", + "force", + "requestId", + ) + ) + & set(("name",)) + ) + + +def test_delete_deployment_group_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deploymentGroups/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_deployment_group( + config.DeleteDeploymentGroupRequest(), + name="name_value", + ) + + +def test_list_deployment_groups_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_deployment_groups + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_deployment_groups] = ( + mock_rpc + ) + + request = {} + client.list_deployment_groups(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_deployment_groups(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_deployment_groups_rest_required_fields( + request_type=config.ListDeploymentGroupsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployment_groups._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployment_groups._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "filter", + "order_by", + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentGroupsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_deployment_groups(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployment_groups_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployment_groups._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "filter", + "orderBy", + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_deployment_groups_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.ListDeploymentGroupsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentGroupsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_deployment_groups(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*}/deploymentGroups" + % client.transport._host, + args[1], + ) + + +def test_list_deployment_groups_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployment_groups( + config.ListDeploymentGroupsRequest(), + parent="parent_value", + ) + + +def test_list_deployment_groups_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[], + next_page_token="def", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupsResponse( + deployment_groups=[ + config.DeploymentGroup(), + config.DeploymentGroup(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + config.ListDeploymentGroupsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_deployment_groups(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.DeploymentGroup) for i in results) + + pages = list(client.list_deployment_groups(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_provision_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 
+ wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.provision_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.provision_deployment_group + ] = mock_rpc + + request = {} + client.provision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.provision_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_provision_deployment_group_rest_required_fields( + request_type=config.ProvisionDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provision_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).provision_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert 
"name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.provision_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_provision_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.provision_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_provision_deployment_group_rest_flattened(): + client = ConfigClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.provision_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deploymentGroups/*}:provision" + % client.transport._host, + args[1], + ) + + +def test_provision_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.provision_deployment_group( + config.ProvisionDeploymentGroupRequest(), + name="name_value", + ) + + +def test_deprovision_deployment_group_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.deprovision_deployment_group + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.deprovision_deployment_group + ] = mock_rpc + + request = {} + client.deprovision_deployment_group(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.deprovision_deployment_group(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_deprovision_deployment_group_rest_required_fields( + request_type=config.DeprovisionDeploymentGroupRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deprovision_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).deprovision_deployment_group._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.deprovision_deployment_group(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_deprovision_deployment_group_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.deprovision_deployment_group._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_deprovision_deployment_group_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.deprovision_deployment_group(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deploymentGroups/*}:deprovision" + % client.transport._host, + args[1], + ) + + +def test_deprovision_deployment_group_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.deprovision_deployment_group( + config.DeprovisionDeploymentGroupRequest(), + name="name_value", + ) + + +def test_get_deployment_group_revision_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.get_deployment_group_revision + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.get_deployment_group_revision + ] = mock_rpc + + request = {} + client.get_deployment_group_revision(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_deployment_group_revision(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_deployment_group_revision_rest_required_fields( + request_type=config.GetDeploymentGroupRevisionRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment_group_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_deployment_group_revision._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.DeploymentGroupRevision() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.DeploymentGroupRevision.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_deployment_group_revision(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_deployment_group_revision_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_deployment_group_revision._get_unset_required_fields( + {} + ) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_deployment_group_revision_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.DeploymentGroupRevision() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3/revisions/sample4" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.DeploymentGroupRevision.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_deployment_group_revision(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{name=projects/*/locations/*/deploymentGroups/*/revisions/*}" + % client.transport._host, + args[1], + ) + + +def test_get_deployment_group_revision_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_deployment_group_revision( + config.GetDeploymentGroupRevisionRequest(), + name="name_value", + ) + + +def test_list_deployment_group_revisions_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_deployment_group_revisions + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.list_deployment_group_revisions + ] = mock_rpc + + request = {} + client.list_deployment_group_revisions(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_deployment_group_revisions(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_deployment_group_revisions_rest_required_fields( + request_type=config.ListDeploymentGroupRevisionsRequest, +): + transport_class = transports.ConfigRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployment_group_revisions._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_deployment_group_revisions._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = config.ListDeploymentGroupRevisionsResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentGroupRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_deployment_group_revisions(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_deployment_group_revisions_rest_unset_required_fields(): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_deployment_group_revisions._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_deployment_group_revisions_rest_flattened(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.ListDeploymentGroupRevisionsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "parent": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = config.ListDeploymentGroupRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_deployment_group_revisions(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v1/{parent=projects/*/locations/*/deploymentGroups/*}/revisions" + % client.transport._host, + args[1], + ) + + +def test_list_deployment_group_revisions_rest_flattened_error(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_deployment_group_revisions( + config.ListDeploymentGroupRevisionsRequest(), + parent="parent_value", + ) + + +def test_list_deployment_group_revisions_rest_pager(transport: str = "rest"): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + next_page_token="abc", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[], + next_page_token="def", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + ], + next_page_token="ghi", + ), + config.ListDeploymentGroupRevisionsResponse( + deployment_group_revisions=[ + config.DeploymentGroupRevision(), + config.DeploymentGroupRevision(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + config.ListDeploymentGroupRevisionsResponse.to_json(x) for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "parent": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + + pager = client.list_deployment_group_revisions(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, config.DeploymentGroupRevision) for i in results) + + pages = list( + client.list_deployment_group_revisions(request=sample_request).pages + ) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. 
+ transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = ConfigClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = ConfigClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.ConfigGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.ConfigGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.ConfigGrpcTransport, + transports.ConfigGrpcAsyncIOTransport, + transports.ConfigRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = ConfigClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_deployments_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + call.return_value = config.ListDeploymentsResponse() + client.list_deployments(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + call.return_value = config.Deployment() + client.get_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreateDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deployment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_revisions_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + call.return_value = config.ListRevisionsResponse() + client.list_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_revision_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + call.return_value = config.Revision() + client.get_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_resource_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + call.return_value = config.Resource() + client.get_resource(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_resources_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + call.return_value = config.ListResourcesResponse() + client.list_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_export_deployment_statefile_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + call.return_value = config.Statefile() + client.export_deployment_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportDeploymentStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_revision_statefile_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + call.return_value = config.Statefile() + client.export_revision_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportRevisionStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_statefile_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + call.return_value = config.Statefile() + client.import_statefile(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ImportStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_statefile_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + call.return_value = None + client.delete_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_lock_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.lock_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.LockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_unlock_deployment_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.unlock_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UnlockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_lock_info_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + call.return_value = config.LockInfo() + client.export_lock_info(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportLockInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_preview_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreatePreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_get_preview_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + call.return_value = config.Preview() + client.get_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetPreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_previews_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + call.return_value = config.ListPreviewsResponse() + client.list_previews(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListPreviewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_preview_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_preview(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeletePreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_preview_result_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + call.return_value = config.ExportPreviewResultResponse() + client.export_preview_result(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportPreviewResultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_terraform_versions_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_terraform_versions), "__call__" + ) as call: + call.return_value = config.ListTerraformVersionsResponse() + client.list_terraform_versions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListTerraformVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_terraform_version_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_terraform_version), "__call__" + ) as call: + call.return_value = config.TerraformVersion() + client.get_terraform_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetTerraformVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_resource_changes_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_changes), "__call__" + ) as call: + call.return_value = config.ListResourceChangesResponse() + client.list_resource_changes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourceChangesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_resource_change_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_resource_change), "__call__" + ) as call: + call.return_value = config.ResourceChange() + client.get_resource_change(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceChangeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. 
request == None and no flattened fields passed, work. +def test_list_resource_drifts_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_drifts), "__call__" + ) as call: + call.return_value = config.ListResourceDriftsResponse() + client.list_resource_drifts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourceDriftsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_resource_drift_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_resource_drift), "__call__" + ) as call: + call.return_value = config.ResourceDrift() + client.get_resource_drift(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceDriftRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_auto_migration_config_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + call.return_value = config.AutoMigrationConfig() + client.get_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_auto_migration_config_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group), "__call__" + ) as call: + call.return_value = config.DeploymentGroup() + client.get_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreateDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_deployment_groups_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_groups), "__call__" + ) as call: + call.return_value = config.ListDeploymentGroupsResponse() + client.list_deployment_groups(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListDeploymentGroupsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_provision_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.provision_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.provision_deployment_group(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ProvisionDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_deprovision_deployment_group_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.deprovision_deployment_group), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.deprovision_deployment_group(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeprovisionDeploymentGroupRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_deployment_group_revision_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_deployment_group_revision), "__call__" + ) as call: + call.return_value = config.DeploymentGroupRevision() + client.get_deployment_group_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetDeploymentGroupRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_list_deployment_group_revisions_empty_call_grpc(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_deployment_group_revisions), "__call__" + ) as call: + call.return_value = config.ListDeploymentGroupRevisionsResponse() + client.list_deployment_group_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListDeploymentGroupRevisionsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = ConfigAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_deployments_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_deployments), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListDeploymentsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_deployments(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListDeploymentsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Deployment( + name="name_value", + state=config.Deployment.State.CREATING, + latest_revision="latest_revision_value", + state_detail="state_detail_value", + error_code=config.Deployment.ErrorCode.REVISION_FAILED, + delete_build="delete_build_value", + delete_logs="delete_logs_value", + error_logs="error_logs_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + lock_state=config.Deployment.LockState.LOCKED, + tf_version_constraint="tf_version_constraint_value", + tf_version="tf_version_value", + quota_validation=config.QuotaValidation.ENABLED, + ) + ) + await client.get_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_create_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreateDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_revisions_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_revisions), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_revisions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListRevisionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_revision_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_revision), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Revision( + name="name_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, + state_detail="state_detail_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", + error_logs="error_logs_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + tf_version_constraint="tf_version_constraint_value", + tf_version="tf_version_value", + quota_validation_results="quota_validation_results_value", + quota_validation=config.QuotaValidation.ENABLED, + ) + ) + await client.get_revision(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetRevisionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_resource_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_resource), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, + ) + ) + await client.get_resource(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_resources_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_deployment_statefile_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + await client.export_deployment_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportDeploymentStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_revision_statefile_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + await client.export_revision_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportRevisionStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_import_statefile_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Statefile( + signed_uri="signed_uri_value", + ) + ) + await client.import_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ImportStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_statefile_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_lock_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.lock_deployment(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.LockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_unlock_deployment_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.unlock_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UnlockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_lock_info_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", + ) + ) + await client.export_lock_info(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportLockInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_preview_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreatePreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_preview_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + tf_version="tf_version_value", + tf_version_constraint="tf_version_constraint_value", + ) + ) + await client.get_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetPreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_previews_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_previews(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListPreviewsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_delete_preview_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeletePreviewRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_export_preview_result_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ExportPreviewResultResponse() + ) + await client.export_preview_result(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportPreviewResultRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_terraform_versions_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_terraform_versions), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListTerraformVersionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_terraform_versions(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListTerraformVersionsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_terraform_version_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_terraform_version), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.TerraformVersion( + name="name_value", + state=config.TerraformVersion.State.ACTIVE, + ) + ) + await client.get_terraform_version(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetTerraformVersionRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_resource_changes_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_changes), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourceChangesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_resource_changes(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourceChangesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_resource_change_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_resource_change), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ResourceChange( + name="name_value", + intent=config.ResourceChange.Intent.CREATE, + ) + ) + await client.get_resource_change(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceChangeRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_list_resource_drifts_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_resource_drifts), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ListResourceDriftsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) + ) + await client.list_resource_drifts(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourceDriftsRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_resource_drift_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_resource_drift), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.ResourceDrift( + name="name_value", + ) + ) + await client.get_resource_drift(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetResourceDriftRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+@pytest.mark.asyncio +async def test_get_auto_migration_config_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, + ) + ) + await client.get_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.GetAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_auto_migration_config_empty_call_grpc_asyncio(): + client = ConfigAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_auto_migration_config(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UpdateAutoMigrationConfigRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
@pytest.mark.asyncio
async def test_get_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: get_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.get_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            config.DeploymentGroup(
                name="name_value",
                state=config.DeploymentGroup.State.CREATING,
                state_description="state_description_value",
                provisioning_state=config.DeploymentGroup.ProvisioningState.PROVISIONING,
                provisioning_state_description="provisioning_state_description_value",
            )
        )
        await client.get_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.GetDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_create_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: create_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.create_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        await client.create_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.CreateDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_update_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: update_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.update_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        await client.update_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.UpdateDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_delete_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: delete_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.delete_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        await client.delete_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.DeleteDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_list_deployment_groups_empty_call_grpc_asyncio():
    """Coverage failsafe: list_deployment_groups(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.list_deployment_groups), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            config.ListDeploymentGroupsResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        await client.list_deployment_groups(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.ListDeploymentGroupsRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_provision_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: provision_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.provision_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        await client.provision_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.ProvisionDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_deprovision_deployment_group_empty_call_grpc_asyncio():
    """Coverage failsafe: deprovision_deployment_group(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.deprovision_deployment_group), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        await client.deprovision_deployment_group(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.DeprovisionDeploymentGroupRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_get_deployment_group_revision_empty_call_grpc_asyncio():
    """Coverage failsafe: get_deployment_group_revision(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.get_deployment_group_revision), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            config.DeploymentGroupRevision(
                name="name_value",
                alternative_ids=["alternative_ids_value"],
            )
        )
        await client.get_deployment_group_revision(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.GetDeploymentGroupRevisionRequest()

        assert args[0] == request_msg


# This test is a coverage failsafe to make sure that totally empty calls,
# i.e. request == None and no flattened fields passed, work.
@pytest.mark.asyncio
async def test_list_deployment_group_revisions_empty_call_grpc_asyncio():
    """Coverage failsafe: list_deployment_group_revisions(request=None) sends a default request."""
    client = ConfigAsyncClient(
        credentials=async_anonymous_credentials(),
        transport="grpc_asyncio",
    )

    # Mock the actual call, and fake the request.
    with mock.patch.object(
        type(client.transport.list_deployment_group_revisions), "__call__"
    ) as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            config.ListDeploymentGroupRevisionsResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        await client.list_deployment_group_revisions(request=None)

        # Establish that the underlying stub method was called.
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        request_msg = config.ListDeploymentGroupRevisionsRequest()

        assert args[0] == request_msg


def test_transport_kind_rest():
    """The transport class registered under "rest" reports kind == "rest"."""
    transport = ConfigClient.get_transport_class("rest")(
        credentials=ga_credentials.AnonymousCredentials()
    )
    assert transport.kind == "rest"


def test_list_deployments_rest_bad_request(request_type=config.ListDeploymentsRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with (
        mock.patch.object(Session, "request") as req,
        pytest.raises(core_exceptions.BadRequest),
    ):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ""
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        client.list_deployments(request)


@pytest.mark.parametrize(
    "request_type",
    [
        config.ListDeploymentsRequest,
        dict,
    ],
)
def test_list_deployments_rest_call_success(request_type):
    """A successful REST list_deployments call returns a populated pager."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = config.ListDeploymentsResponse(
            next_page_token="next_page_token_value",
            unreachable=["unreachable_value"],
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = config.ListDeploymentsResponse.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        response = client.list_deployments(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListDeploymentsPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_list_deployments_rest_interceptors(null_interceptor):
    """pre/post/post-with-metadata interceptor hooks fire around list_deployments."""
    transport = transports.ConfigRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigRestInterceptor(),
    )
    client = ConfigClient(transport=transport)

    with (
        mock.patch.object(type(client.transport._session), "request") as req,
        mock.patch.object(path_template, "transcode") as transcode,
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_list_deployments"
        ) as post,
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_list_deployments_with_metadata"
        ) as post_with_metadata,
        mock.patch.object(
            transports.ConfigRestInterceptor, "pre_list_deployments"
        ) as pre,
    ):
        pre.assert_not_called()
        post.assert_not_called()
        post_with_metadata.assert_not_called()
        pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        return_value = config.ListDeploymentsResponse.to_json(
            config.ListDeploymentsResponse()
        )
        req.return_value.content = return_value

        request = config.ListDeploymentsRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = config.ListDeploymentsResponse()
        post_with_metadata.return_value = config.ListDeploymentsResponse(), metadata

        client.list_deployments(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
        post_with_metadata.assert_called_once()


def test_get_deployment_rest_bad_request(request_type=config.GetDeploymentRequest):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with (
        mock.patch.object(Session, "request") as req,
        pytest.raises(core_exceptions.BadRequest),
    ):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ""
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        client.get_deployment(request)


@pytest.mark.parametrize(
    "request_type",
    [
        config.GetDeploymentRequest,
        dict,
    ],
)
def test_get_deployment_rest_call_success(request_type):
    """A successful REST get_deployment call round-trips every Deployment field."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = config.Deployment(
            name="name_value",
            state=config.Deployment.State.CREATING,
            latest_revision="latest_revision_value",
            state_detail="state_detail_value",
            error_code=config.Deployment.ErrorCode.REVISION_FAILED,
            delete_build="delete_build_value",
            delete_logs="delete_logs_value",
            error_logs="error_logs_value",
            artifacts_gcs_bucket="artifacts_gcs_bucket_value",
            service_account="service_account_value",
            import_existing_resources=True,
            worker_pool="worker_pool_value",
            lock_state=config.Deployment.LockState.LOCKED,
            tf_version_constraint="tf_version_constraint_value",
            tf_version="tf_version_value",
            quota_validation=config.QuotaValidation.ENABLED,
        )

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200

        # Convert return value to protobuf type
        return_value = config.Deployment.pb(return_value)
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        response = client.get_deployment(request)

    # Establish that the response is the type that we expect.
    assert isinstance(response, config.Deployment)
    assert response.name == "name_value"
    assert response.state == config.Deployment.State.CREATING
    assert response.latest_revision == "latest_revision_value"
    assert response.state_detail == "state_detail_value"
    assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED
    assert response.delete_build == "delete_build_value"
    assert response.delete_logs == "delete_logs_value"
    assert response.error_logs == "error_logs_value"
    assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value"
    assert response.service_account == "service_account_value"
    assert response.import_existing_resources is True
    assert response.worker_pool == "worker_pool_value"
    assert response.lock_state == config.Deployment.LockState.LOCKED
    assert response.tf_version_constraint == "tf_version_constraint_value"
    assert response.tf_version == "tf_version_value"
    assert response.quota_validation == config.QuotaValidation.ENABLED


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_get_deployment_rest_interceptors(null_interceptor):
    """pre/post/post-with-metadata interceptor hooks fire around get_deployment."""
    transport = transports.ConfigRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigRestInterceptor(),
    )
    client = ConfigClient(transport=transport)

    with (
        mock.patch.object(type(client.transport._session), "request") as req,
        mock.patch.object(path_template, "transcode") as transcode,
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_get_deployment"
        ) as post,
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_get_deployment_with_metadata"
        ) as post_with_metadata,
        mock.patch.object(
            transports.ConfigRestInterceptor, "pre_get_deployment"
        ) as pre,
    ):
        pre.assert_not_called()
        post.assert_not_called()
        post_with_metadata.assert_not_called()
        pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        return_value = config.Deployment.to_json(config.Deployment())
        req.return_value.content = return_value

        request = config.GetDeploymentRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = config.Deployment()
        post_with_metadata.return_value = config.Deployment(), metadata

        client.get_deployment(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
        post_with_metadata.assert_called_once()


def test_create_deployment_rest_bad_request(
    request_type=config.CreateDeploymentRequest,
):
    """An HTTP 400 from the session surfaces as core_exceptions.BadRequest."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )
    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a BadRequest error.
    with (
        mock.patch.object(Session, "request") as req,
        pytest.raises(core_exceptions.BadRequest),
    ):
        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        json_return_value = ""
        response_value.json = mock.Mock(return_value={})
        response_value.status_code = 400
        response_value.request = mock.Mock()
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        client.create_deployment(request)


@pytest.mark.parametrize(
    "request_type",
    [
        config.CreateDeploymentRequest,
        dict,
    ],
)
def test_create_deployment_rest_call_success(request_type):
    """A successful REST create_deployment call returns an LRO Operation."""
    client = ConfigClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="rest"
    )

    # send a request that will satisfy transcoding
    request_init = {"parent": "projects/sample1/locations/sample2"}
    request_init["deployment"] = {
        "terraform_blueprint": {
            "gcs_source": "gcs_source_value",
            "git_source": {
                "repo": "repo_value",
                "directory": "directory_value",
                "ref": "ref_value",
            },
            "input_values": {},
            "external_values": {},
        },
        "name": "name_value",
        "create_time": {"seconds": 751, "nanos": 543},
        "update_time": {},
        "labels": {},
        "state": 1,
        "latest_revision": "latest_revision_value",
        "state_detail": "state_detail_value",
        "error_code": 1,
        "delete_results": {
            "content": "content_value",
            "artifacts": "artifacts_value",
            "outputs": {},
        },
        "delete_build": "delete_build_value",
        "delete_logs": "delete_logs_value",
        "tf_errors": [
            {
                "resource_address": "resource_address_value",
                "http_response_code": 1928,
                "error_description": "error_description_value",
                "error": {
                    "code": 411,
                    "message": "message_value",
                    "details": [
                        {
                            "type_url": "type.googleapis.com/google.protobuf.Duration",
                            "value": b"\x08\x0c\x10\xdb\x07",
                        }
                    ],
                },
            }
        ],
        "error_logs": "error_logs_value",
        "artifacts_gcs_bucket": "artifacts_gcs_bucket_value",
        "service_account": "service_account_value",
        "import_existing_resources": True,
        "worker_pool": "worker_pool_value",
        "lock_state": 1,
        "tf_version_constraint": "tf_version_constraint_value",
        "tf_version": "tf_version_value",
        "quota_validation": 1,
        "annotations": {},
        "provider_config": {"source_type": 1},
    }
    # The version of a generated dependency at test runtime may differ from the version used during generation.
    # Delete any fields which are not present in the current runtime dependency
    # See https://github.com/googleapis/gapic-generator-python/issues/1748

    # Determine if the message type is proto-plus or protobuf
    test_field = config.CreateDeploymentRequest.meta.fields["deployment"]

    def get_message_fields(field):
        # Given a field which is a message (composite type), return a list with
        # all the fields of the message.
        # If the field is not a composite type, return an empty list.
        message_fields = []

        if hasattr(field, "message") and field.message:
            is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR")

            if is_field_type_proto_plus_type:
                message_fields = field.message.meta.fields.values()
            # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types
            else:  # pragma: NO COVER
                message_fields = field.message.DESCRIPTOR.fields
        return message_fields

    runtime_nested_fields = [
        (field.name, nested_field.name)
        for field in get_message_fields(test_field)
        for nested_field in get_message_fields(field)
    ]

    subfields_not_in_runtime = []

    # For each item in the sample request, create a list of sub fields which are not present at runtime
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for field, value in request_init["deployment"].items():  # pragma: NO COVER
        result = None
        is_repeated = False
        # For repeated fields
        if isinstance(value, list) and len(value):
            is_repeated = True
            result = value[0]
        # For fields where the type is another message
        if isinstance(value, dict):
            result = value

        if result and hasattr(result, "keys"):
            for subfield in result.keys():
                if (field, subfield) not in runtime_nested_fields:
                    subfields_not_in_runtime.append(
                        {
                            "field": field,
                            "subfield": subfield,
                            "is_repeated": is_repeated,
                        }
                    )

    # Remove fields from the sample request which are not present in the runtime version of the dependency
    # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime
    for subfield_to_delete in subfields_not_in_runtime:  # pragma: NO COVER
        field = subfield_to_delete.get("field")
        field_repeated = subfield_to_delete.get("is_repeated")
        subfield = subfield_to_delete.get("subfield")
        if subfield:
            if field_repeated:
                for i in range(0, len(request_init["deployment"][field])):
                    del request_init["deployment"][field][i][subfield]
            else:
                del request_init["deployment"][field][subfield]
    request = request_type(**request_init)

    # Mock the http request call within the method and fake a response.
    with mock.patch.object(type(client.transport._session), "request") as req:
        # Designate an appropriate value for the returned response.
        return_value = operations_pb2.Operation(name="operations/spam")

        # Wrap the value into a proper Response obj
        response_value = mock.Mock()
        response_value.status_code = 200
        json_return_value = json_format.MessageToJson(return_value)
        response_value.content = json_return_value.encode("UTF-8")
        req.return_value = response_value
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        response = client.create_deployment(request)

    # Establish that the response is the type that we expect.
    # NOTE(review): no assertion is made on `response` here for this LRO
    # method — presumably intentional generator output; confirm upstream.
    json_return_value = json_format.MessageToJson(return_value)


@pytest.mark.parametrize("null_interceptor", [True, False])
def test_create_deployment_rest_interceptors(null_interceptor):
    """pre/post/post-with-metadata interceptor hooks fire around create_deployment."""
    transport = transports.ConfigRestTransport(
        credentials=ga_credentials.AnonymousCredentials(),
        interceptor=None if null_interceptor else transports.ConfigRestInterceptor(),
    )
    client = ConfigClient(transport=transport)

    with (
        mock.patch.object(type(client.transport._session), "request") as req,
        mock.patch.object(path_template, "transcode") as transcode,
        mock.patch.object(operation.Operation, "_set_result_from_operation"),
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_create_deployment"
        ) as post,
        mock.patch.object(
            transports.ConfigRestInterceptor, "post_create_deployment_with_metadata"
        ) as post_with_metadata,
        mock.patch.object(
            transports.ConfigRestInterceptor, "pre_create_deployment"
        ) as pre,
    ):
        pre.assert_not_called()
        post.assert_not_called()
        post_with_metadata.assert_not_called()
        pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest())
        transcode.return_value = {
            "method": "post",
            "uri": "my_uri",
            "body": pb_message,
            "query_params": pb_message,
        }

        req.return_value = mock.Mock()
        req.return_value.status_code = 200
        req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"}
        return_value = json_format.MessageToJson(operations_pb2.Operation())
        req.return_value.content = return_value

        request = config.CreateDeploymentRequest()
        metadata = [
            ("key", "val"),
            ("cephalopod", "squid"),
        ]
        pre.return_value = request, metadata
        post.return_value = operations_pb2.Operation()
        post_with_metadata.return_value = operations_pb2.Operation(), metadata

        client.create_deployment(
            request,
            metadata=[
                ("key", "val"),
                ("cephalopod", "squid"),
            ],
        )

        pre.assert_called_once()
        post.assert_called_once()
        post_with_metadata.assert_called_once()

+def test_update_deployment_rest_bad_request( + request_type=config.UpdateDeploymentRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.UpdateDeploymentRequest, + dict, + ], +) +def test_update_deployment_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + } + request_init["deployment"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + "external_values": {}, + }, + "name": "projects/sample1/locations/sample2/deployments/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "state": 1, + "latest_revision": "latest_revision_value", + "state_detail": "state_detail_value", + "error_code": 1, + "delete_results": { + "content": "content_value", + "artifacts": "artifacts_value", + "outputs": 
{}, + }, + "delete_build": "delete_build_value", + "delete_logs": "delete_logs_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + ], + "error_logs": "error_logs_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "service_account": "service_account_value", + "import_existing_resources": True, + "worker_pool": "worker_pool_value", + "lock_state": 1, + "tf_version_constraint": "tf_version_constraint_value", + "tf_version": "tf_version_value", + "quota_validation": 1, + "annotations": {}, + "provider_config": {"source_type": 1}, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment"][field])): + del request_init["deployment"][field][i][subfield] + else: + del 
request_init["deployment"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_deployment(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_update_deployment_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_update_deployment" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + transcode.return_value = { + "method": "post", 
+ "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = config.UpdateDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_deployment_rest_bad_request( + request_type=config.DeleteDeploymentRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_deployment(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.DeleteDeploymentRequest, + dict, + ], +) +def test_delete_deployment_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_deployment(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_deployment_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_delete_deployment_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_delete_deployment" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = config.DeleteDeploymentRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_deployment( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - await client.get_resource_change(request=None) - # Establish that the underlying stub method was called. 
- call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceChangeRequest() + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - assert args[0] == request_msg +def test_list_revisions_rest_bad_request(request_type=config.ListRevisionsRequest): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_list_resource_drifts_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_revisions(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.ListRevisionsRequest, + dict, + ], +) +def test_list_revisions_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.list_resource_drifts), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ListResourceDriftsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ListRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) - await client.list_resource_drifts(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceDriftsRequest() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = config.ListRevisionsResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_revisions(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_revisions_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigRestInterceptor, "post_list_revisions" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_list_revisions_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_list_revisions" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.ListRevisionsResponse.to_json( + config.ListRevisionsResponse() + ) + req.return_value.content = return_value -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_resource_drift_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", - ) + request = config.ListRevisionsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ListRevisionsResponse() + post_with_metadata.return_value = config.ListRevisionsResponse(), metadata - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_resource_drift), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.ResourceDrift( - name="name_value", - ) + client.list_revisions( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], ) - await client.get_resource_drift(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceDriftRequest() + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - assert args[0] == request_msg +def test_get_revision_rest_bad_request(request_type=config.GetRevisionRequest): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. 
-@pytest.mark.asyncio -async def test_get_auto_migration_config_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_revision(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.GetRevisionRequest, + dict, + ], +) +def test_get_revision_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.get_auto_migration_config), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - config.AutoMigrationConfig( - name="name_value", - auto_migration_enabled=True, - ) + # send a request that will satisfy transcoding + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = config.Revision( + name="name_value", + action=config.Revision.Action.CREATE, + state=config.Revision.State.APPLYING, + state_detail="state_detail_value", + error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + logs="logs_value", + error_logs="error_logs_value", + service_account="service_account_value", + import_existing_resources=True, + worker_pool="worker_pool_value", + tf_version_constraint="tf_version_constraint_value", + tf_version="tf_version_value", + quota_validation_results="quota_validation_results_value", + quota_validation=config.QuotaValidation.ENABLED, ) - await client.get_auto_migration_config(request=None) - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.GetAutoMigrationConfigRequest() + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - assert args[0] == request_msg + # Convert return value to protobuf type + return_value = config.Revision.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_revision(request) + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Revision) + assert response.name == "name_value" + assert response.action == config.Revision.Action.CREATE + assert response.state == config.Revision.State.APPLYING + assert response.state_detail == "state_detail_value" + assert ( + response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + ) + assert response.build == "build_value" + assert response.logs == "logs_value" + assert response.error_logs == "error_logs_value" + assert response.service_account == "service_account_value" + assert response.import_existing_resources is True + assert response.worker_pool == "worker_pool_value" + assert response.tf_version_constraint == "tf_version_constraint_value" + assert response.tf_version == "tf_version_value" + assert response.quota_validation_results == "quota_validation_results_value" + assert response.quota_validation == config.QuotaValidation.ENABLED -# This test is a coverage failsafe to make sure that totally empty calls, -# i.e. request == None and no flattened fields passed, work. -@pytest.mark.asyncio -async def test_update_auto_migration_config_empty_call_grpc_asyncio(): - client = ConfigAsyncClient( - credentials=async_anonymous_credentials(), - transport="grpc_asyncio", + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_revision_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), ) + client = ConfigClient(transport=transport) - # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.update_auto_migration_config), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - operations_pb2.Operation(name="operations/spam") - ) - await client.update_auto_migration_config(request=None) + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigRestInterceptor, "post_get_revision" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_get_revision_with_metadata" + ) as post_with_metadata, + mock.patch.object(transports.ConfigRestInterceptor, "pre_get_revision") as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Establish that the underlying stub method was called. - call.assert_called() - _, args, _ = call.mock_calls[0] - request_msg = config.UpdateAutoMigrationConfigRequest() + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.Revision.to_json(config.Revision()) + req.return_value.content = return_value - assert args[0] == request_msg + request = config.GetRevisionRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Revision() + post_with_metadata.return_value = config.Revision(), metadata + client.get_revision( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) -def test_transport_kind_rest(): - transport = ConfigClient.get_transport_class("rest")( - credentials=ga_credentials.AnonymousCredentials() - ) - assert transport.kind == "rest" + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def 
test_list_deployments_rest_bad_request(request_type=config.ListDeploymentsRequest): +def test_get_resource_rest_bad_request(request_type=config.GetResourceRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19859,31 +26981,34 @@ def test_list_deployments_rest_bad_request(request_type=config.ListDeploymentsRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_deployments(request) + client.get_resource(request) @pytest.mark.parametrize( "request_type", [ - config.ListDeploymentsRequest, + config.GetResourceRequest, dict, ], ) -def test_list_deployments_rest_call_success(request_type): +def test_get_resource_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListDeploymentsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = config.Resource( + name="name_value", + intent=config.Resource.Intent.CREATE, + state=config.Resource.State.PLANNED, ) # Wrap the value into a proper Response obj @@ -19891,21 +27016,22 @@ def test_list_deployments_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListDeploymentsResponse.pb(return_value) + return_value = config.Resource.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_deployments(request) + response = client.get_resource(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListDeploymentsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, config.Resource) + assert response.name == "name_value" + assert response.intent == config.Resource.Intent.CREATE + assert response.state == config.Resource.State.PLANNED @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_deployments_rest_interceptors(null_interceptor): +def test_get_resource_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -19916,19 +27042,17 @@ def test_list_deployments_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_deployments" + 
transports.ConfigRestInterceptor, "post_get_resource" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_deployments_with_metadata" + transports.ConfigRestInterceptor, "post_get_resource_with_metadata" ) as post_with_metadata, - mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_deployments" - ) as pre, + mock.patch.object(transports.ConfigRestInterceptor, "pre_get_resource") as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListDeploymentsRequest.pb(config.ListDeploymentsRequest()) + pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -19939,21 +27063,19 @@ def test_list_deployments_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListDeploymentsResponse.to_json( - config.ListDeploymentsResponse() - ) + return_value = config.Resource.to_json(config.Resource()) req.return_value.content = return_value - request = config.ListDeploymentsRequest() + request = config.GetResourceRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListDeploymentsResponse() - post_with_metadata.return_value = config.ListDeploymentsResponse(), metadata + post.return_value = config.Resource() + post_with_metadata.return_value = config.Resource(), metadata - client.list_deployments( + client.get_resource( request, metadata=[ ("key", "val"), @@ -19966,12 +27088,14 @@ def test_list_deployments_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_deployment_rest_bad_request(request_type=config.GetDeploymentRequest): +def test_list_resources_rest_bad_request(request_type=config.ListResourcesRequest): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -19987,45 +27111,33 @@ def test_get_deployment_rest_bad_request(request_type=config.GetDeploymentReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_deployment(request) + client.list_resources(request) @pytest.mark.parametrize( "request_type", [ - config.GetDeploymentRequest, + config.ListResourcesRequest, dict, ], ) -def test_get_deployment_rest_call_success(request_type): +def test_list_resources_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Deployment( - name="name_value", - state=config.Deployment.State.CREATING, - latest_revision="latest_revision_value", - state_detail="state_detail_value", - error_code=config.Deployment.ErrorCode.REVISION_FAILED, - delete_build="delete_build_value", - delete_logs="delete_logs_value", - error_logs="error_logs_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", - lock_state=config.Deployment.LockState.LOCKED, - tf_version_constraint="tf_version_constraint_value", - tf_version="tf_version_value", - quota_validation=config.QuotaValidation.ENABLED, + return_value = config.ListResourcesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -20033,35 +27145,21 @@ def test_get_deployment_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Deployment.pb(return_value) + return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_deployment(request) + response = client.list_resources(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Deployment) - assert response.name == "name_value" - assert response.state == config.Deployment.State.CREATING - assert response.latest_revision == "latest_revision_value" - assert response.state_detail == "state_detail_value" - assert response.error_code == config.Deployment.ErrorCode.REVISION_FAILED - assert response.delete_build == "delete_build_value" - assert response.delete_logs == "delete_logs_value" - assert response.error_logs == "error_logs_value" - assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" - assert response.service_account == "service_account_value" - assert response.import_existing_resources is True - assert response.worker_pool == "worker_pool_value" - assert response.lock_state == config.Deployment.LockState.LOCKED - assert response.tf_version_constraint == "tf_version_constraint_value" - assert response.tf_version == "tf_version_value" - assert response.quota_validation == config.QuotaValidation.ENABLED + assert isinstance(response, pagers.ListResourcesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_deployment_rest_interceptors(null_interceptor): +def test_list_resources_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20072,19 +27170,19 @@ def test_get_deployment_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_deployment" + transports.ConfigRestInterceptor, "post_list_resources" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_deployment_with_metadata" + 
transports.ConfigRestInterceptor, "post_list_resources_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_deployment" + transports.ConfigRestInterceptor, "pre_list_resources" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetDeploymentRequest.pb(config.GetDeploymentRequest()) + pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20095,19 +27193,21 @@ def test_get_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Deployment.to_json(config.Deployment()) + return_value = config.ListResourcesResponse.to_json( + config.ListResourcesResponse() + ) req.return_value.content = return_value - request = config.GetDeploymentRequest() + request = config.ListResourcesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Deployment() - post_with_metadata.return_value = config.Deployment(), metadata + post.return_value = config.ListResourcesResponse() + post_with_metadata.return_value = config.ListResourcesResponse(), metadata - client.get_deployment( + client.list_resources( request, metadata=[ ("key", "val"), @@ -20120,14 +27220,14 @@ def test_get_deployment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_create_deployment_rest_bad_request( - request_type=config.CreateDeploymentRequest, +def test_export_deployment_statefile_rest_bad_request( + request_type=config.ExportDeploymentStatefileRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": 
"projects/sample1/locations/sample2"} + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -20138,171 +27238,56 @@ def test_create_deployment_rest_bad_request( # Wrap the value into a proper Response obj response_value = mock.Mock() json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_deployment(request) - - -@pytest.mark.parametrize( - "request_type", - [ - config.CreateDeploymentRequest, - dict, - ], -) -def test_create_deployment_rest_call_success(request_type): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - "artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": 
b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, - "tf_version_constraint": "tf_version_constraint_value", - "tf_version": "tf_version_value", - "quota_validation": 1, - "annotations": {}, - "provider_config": {"source_type": 1}, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.CreateDeploymentRequest.meta.fields["deployment"] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.export_deployment_statefile(request) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = 
subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] + +@pytest.mark.parametrize( + "request_type", + [ + config.ExportDeploymentStatefileRequest, + dict, + ], +) +def test_export_deployment_statefile_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_deployment(request) + response = client.export_deployment_statefile(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_deployment_rest_interceptors(null_interceptor): +def test_export_deployment_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20312,21 +27297,23 @@ def test_create_deployment_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_create_deployment" + transports.ConfigRestInterceptor, "post_export_deployment_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_create_deployment_with_metadata" + transports.ConfigRestInterceptor, + "post_export_deployment_statefile_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_create_deployment" + transports.ConfigRestInterceptor, "pre_export_deployment_statefile" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.CreateDeploymentRequest.pb(config.CreateDeploymentRequest()) + pb_message = config.ExportDeploymentStatefileRequest.pb( + config.ExportDeploymentStatefileRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20337,19 +27324,19 @@ def test_create_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.Statefile.to_json(config.Statefile()) req.return_value.content = return_value - request = config.CreateDeploymentRequest() + request = config.ExportDeploymentStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata - client.create_deployment( + client.export_deployment_statefile( request, metadata=[ ("key", "val"), @@ -20362,15 +27349,15 @@ def test_create_deployment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_deployment_rest_bad_request( - request_type=config.UpdateDeploymentRequest, +def test_export_revision_statefile_rest_bad_request( + request_type=config.ExportRevisionStatefileRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } request = request_type(**request_init) @@ -20387,168 +27374,180 @@ def test_update_deployment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_deployment(request) + client.export_revision_statefile(request) @pytest.mark.parametrize( "request_type", [ - config.UpdateDeploymentRequest, + config.ExportRevisionStatefileRequest, dict, ], ) -def test_update_deployment_rest_call_success(request_type): +def test_export_revision_statefile_rest_call_success(request_type): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "deployment": {"name": "projects/sample1/locations/sample2/deployments/sample3"} - } - request_init["deployment"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "projects/sample1/locations/sample2/deployments/sample3", - "create_time": {"seconds": 751, "nanos": 543}, - "update_time": {}, - "labels": {}, - "state": 1, - "latest_revision": "latest_revision_value", - "state_detail": "state_detail_value", - "error_code": 1, - "delete_results": { - "content": "content_value", - "artifacts": "artifacts_value", - "outputs": {}, - }, - "delete_build": "delete_build_value", - "delete_logs": "delete_logs_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": { - "code": 411, - "message": "message_value", - "details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - } - ], - "error_logs": "error_logs_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "service_account": "service_account_value", - "import_existing_resources": True, - "worker_pool": "worker_pool_value", - "lock_state": 1, - "tf_version_constraint": "tf_version_constraint_value", - "tf_version": "tf_version_value", - "quota_validation": 1, - "annotations": {}, - "provider_config": {"source_type": 1}, + "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) - # Determine if the message type is proto-plus or protobuf - test_field = config.UpdateDeploymentRequest.meta.fields["deployment"] + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.export_revision_statefile(request) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_export_revision_statefile_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigRestInterceptor, "post_export_revision_statefile" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, + "post_export_revision_statefile_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_revision_statefile" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.ExportRevisionStatefileRequest.pb( + config.ExportRevisionStatefileRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.Statefile.to_json(config.Statefile()) + req.return_value.content = return_value + + request = config.ExportRevisionStatefileRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata + + 
client.export_revision_statefile( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_import_statefile_rest_bad_request(request_type=config.ImportStatefileRequest): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.import_statefile(request) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["deployment"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) +@pytest.mark.parametrize( + "request_type", + [ + config.ImportStatefileRequest, + 
dict, + ], +) +def test_import_statefile_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["deployment"][field])): - del request_init["deployment"][field][i][subfield] - else: - del request_init["deployment"][field][subfield] + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.Statefile( + signed_uri="signed_uri_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_deployment(request) + response = client.import_statefile(request) # Establish that the response is the type that we expect. 
- json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, config.Statefile) + assert response.signed_uri == "signed_uri_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_deployment_rest_interceptors(null_interceptor): +def test_import_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20558,21 +27557,20 @@ def test_update_deployment_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_update_deployment" + transports.ConfigRestInterceptor, "post_import_statefile" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_update_deployment_with_metadata" + transports.ConfigRestInterceptor, "post_import_statefile_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_update_deployment" + transports.ConfigRestInterceptor, "pre_import_statefile" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.UpdateDeploymentRequest.pb(config.UpdateDeploymentRequest()) + pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20583,19 +27581,19 @@ def test_update_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.Statefile.to_json(config.Statefile()) 
req.return_value.content = return_value - request = config.UpdateDeploymentRequest() + request = config.ImportStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.Statefile() + post_with_metadata.return_value = config.Statefile(), metadata - client.update_deployment( + client.import_statefile( request, metadata=[ ("key", "val"), @@ -20608,9 +27606,7 @@ def test_update_deployment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_deployment_rest_bad_request( - request_type=config.DeleteDeploymentRequest, -): +def test_delete_statefile_rest_bad_request(request_type=config.DeleteStatefileRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20631,17 +27627,17 @@ def test_delete_deployment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_deployment(request) + client.delete_statefile(request) @pytest.mark.parametrize( "request_type", [ - config.DeleteDeploymentRequest, + config.DeleteStatefileRequest, dict, ], ) -def test_delete_deployment_rest_call_success(request_type): +def test_delete_statefile_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) @@ -20653,23 +27649,23 @@ def test_delete_deployment_rest_call_success(request_type): # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = None # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = json_format.MessageToJson(return_value) + json_return_value = "" response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_deployment(request) + response = client.delete_statefile(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert response is None @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_deployment_rest_interceptors(null_interceptor): +def test_delete_statefile_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20679,21 +27675,12 @@ def test_delete_deployment_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), - mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_deployment" - ) as post, - mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_deployment_with_metadata" - ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_deployment" + transports.ConfigRestInterceptor, "pre_delete_statefile" ) as pre, ): pre.assert_not_called() - post.assert_not_called() - post_with_metadata.assert_not_called() - pb_message = config.DeleteDeploymentRequest.pb(config.DeleteDeploymentRequest()) + pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) transcode.return_value = { 
"method": "post", "uri": "my_uri", @@ -20704,19 +27691,15 @@ def test_delete_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) - req.return_value.content = return_value - request = config.DeleteDeploymentRequest() + request = config.DeleteStatefileRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_deployment( + client.delete_statefile( request, metadata=[ ("key", "val"), @@ -20725,16 +27708,14 @@ def test_delete_deployment_rest_interceptors(null_interceptor): ) pre.assert_called_once() - post.assert_called_once() - post_with_metadata.assert_called_once() -def test_list_revisions_rest_bad_request(request_type=config.ListRevisionsRequest): +def test_lock_deployment_rest_bad_request(request_type=config.LockDeploymentRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20750,53 +27731,45 @@ def test_list_revisions_rest_bad_request(request_type=config.ListRevisionsReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_revisions(request) + client.lock_deployment(request) @pytest.mark.parametrize( "request_type", [ - config.ListRevisionsRequest, + config.LockDeploymentRequest, dict, ], ) -def test_list_revisions_rest_call_success(request_type): +def test_lock_deployment_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListRevisionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_revisions(request) + response = client.lock_deployment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListRevisionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_revisions_rest_interceptors(null_interceptor): +def test_lock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20806,20 +27779,21 @@ def test_list_revisions_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_list_revisions" + transports.ConfigRestInterceptor, "post_lock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_revisions_with_metadata" + transports.ConfigRestInterceptor, "post_lock_deployment_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_revisions" + transports.ConfigRestInterceptor, "pre_lock_deployment" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListRevisionsRequest.pb(config.ListRevisionsRequest()) + pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20830,21 +27804,19 @@ def test_list_revisions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListRevisionsResponse.to_json( - config.ListRevisionsResponse() - ) + 
return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ListRevisionsRequest() + request = config.LockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListRevisionsResponse() - post_with_metadata.return_value = config.ListRevisionsResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_revisions( + client.lock_deployment( request, metadata=[ ("key", "val"), @@ -20857,14 +27829,14 @@ def test_list_revisions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_revision_rest_bad_request(request_type=config.GetRevisionRequest): +def test_unlock_deployment_rest_bad_request( + request_type=config.UnlockDeploymentRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -20880,83 +27852,45 @@ def test_get_revision_rest_bad_request(request_type=config.GetRevisionRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_revision(request) + client.unlock_deployment(request) @pytest.mark.parametrize( "request_type", [ - config.GetRevisionRequest, + config.UnlockDeploymentRequest, dict, ], ) -def test_get_revision_rest_call_success(request_type): +def test_unlock_deployment_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Revision( - name="name_value", - action=config.Revision.Action.CREATE, - state=config.Revision.State.APPLYING, - state_detail="state_detail_value", - error_code=config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - logs="logs_value", - error_logs="error_logs_value", - service_account="service_account_value", - import_existing_resources=True, - worker_pool="worker_pool_value", - tf_version_constraint="tf_version_constraint_value", - tf_version="tf_version_value", - quota_validation_results="quota_validation_results_value", - quota_validation=config.QuotaValidation.ENABLED, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Revision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_revision(request) + response = client.unlock_deployment(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Revision) - assert response.name == "name_value" - assert response.action == config.Revision.Action.CREATE - assert response.state == config.Revision.State.APPLYING - assert response.state_detail == "state_detail_value" - assert ( - response.error_code == config.Revision.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED - ) - assert response.build == "build_value" - assert response.logs == "logs_value" - assert response.error_logs == "error_logs_value" - assert response.service_account == "service_account_value" - assert response.import_existing_resources is True - assert response.worker_pool == "worker_pool_value" - assert response.tf_version_constraint == "tf_version_constraint_value" - assert response.tf_version == "tf_version_value" - assert response.quota_validation_results == "quota_validation_results_value" - assert response.quota_validation == config.QuotaValidation.ENABLED + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_revision_rest_interceptors(null_interceptor): +def test_unlock_deployment_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -20966,18 +27900,21 @@ def test_get_revision_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_get_revision" + transports.ConfigRestInterceptor, "post_unlock_deployment" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_revision_with_metadata" + transports.ConfigRestInterceptor, "post_unlock_deployment_with_metadata" ) as post_with_metadata, - 
mock.patch.object(transports.ConfigRestInterceptor, "pre_get_revision") as pre, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_unlock_deployment" + ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetRevisionRequest.pb(config.GetRevisionRequest()) + pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -20988,19 +27925,19 @@ def test_get_revision_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Revision.to_json(config.Revision()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.GetRevisionRequest() + request = config.UnlockDeploymentRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Revision() - post_with_metadata.return_value = config.Revision(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_revision( + client.unlock_deployment( request, metadata=[ ("key", "val"), @@ -21013,14 +27950,12 @@ def test_get_revision_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_resource_rest_bad_request(request_type=config.GetResourceRequest): +def test_export_lock_info_rest_bad_request(request_type=config.ExportLockInfoRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": 
"projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21036,34 +27971,34 @@ def test_get_resource_rest_bad_request(request_type=config.GetResourceRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource(request) + client.export_lock_info(request) @pytest.mark.parametrize( "request_type", [ - config.GetResourceRequest, + config.ExportLockInfoRequest, dict, ], ) -def test_get_resource_rest_call_success(request_type): +def test_export_lock_info_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4/resources/sample5" - } + request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Resource( - name="name_value", - intent=config.Resource.Intent.CREATE, - state=config.Resource.State.PLANNED, + return_value = config.LockInfo( + lock_id=725, + operation="operation_value", + info="info_value", + who="who_value", + version="version_value", ) # Wrap the value into a proper Response obj @@ -21071,22 +28006,24 @@ def test_get_resource_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Resource.pb(return_value) + return_value = config.LockInfo.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource(request) + response = client.export_lock_info(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Resource) - assert response.name == "name_value" - assert response.intent == config.Resource.Intent.CREATE - assert response.state == config.Resource.State.PLANNED + assert isinstance(response, config.LockInfo) + assert response.lock_id == 725 + assert response.operation == "operation_value" + assert response.info == "info_value" + assert response.who == "who_value" + assert response.version == "version_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_rest_interceptors(null_interceptor): +def test_export_lock_info_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21097,17 +28034,19 @@ def test_get_resource_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - 
transports.ConfigRestInterceptor, "post_get_resource" + transports.ConfigRestInterceptor, "post_export_lock_info" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_with_metadata" + transports.ConfigRestInterceptor, "post_export_lock_info_with_metadata" ) as post_with_metadata, - mock.patch.object(transports.ConfigRestInterceptor, "pre_get_resource") as pre, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_lock_info" + ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetResourceRequest.pb(config.GetResourceRequest()) + pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21118,19 +28057,19 @@ def test_get_resource_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Resource.to_json(config.Resource()) + return_value = config.LockInfo.to_json(config.LockInfo()) req.return_value.content = return_value - request = config.GetResourceRequest() + request = config.ExportLockInfoRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Resource() - post_with_metadata.return_value = config.Resource(), metadata + post.return_value = config.LockInfo() + post_with_metadata.return_value = config.LockInfo(), metadata - client.get_resource( + client.export_lock_info( request, metadata=[ ("key", "val"), @@ -21143,14 +28082,12 @@ def test_get_resource_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_resources_rest_bad_request(request_type=config.ListResourcesRequest): +def test_create_preview_rest_bad_request(request_type=config.CreatePreviewRequest): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21166,55 +28103,163 @@ def test_list_resources_rest_bad_request(request_type=config.ListResourcesReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resources(request) + client.create_preview(request) @pytest.mark.parametrize( "request_type", [ - config.ListResourcesRequest, + config.CreatePreviewRequest, dict, ], ) -def test_list_resources_rest_call_success(request_type): +def test_create_preview_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["preview"] = { + "terraform_blueprint": { + "gcs_source": "gcs_source_value", + "git_source": { + "repo": "repo_value", + "directory": "directory_value", + "ref": "ref_value", + }, + "input_values": {}, + "external_values": {}, + }, + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "labels": {}, + "state": 1, + "deployment": "deployment_value", + "preview_mode": 1, + "service_account": "service_account_value", + "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", + "worker_pool": "worker_pool_value", + "error_code": 1, + "error_status": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": 
b"\x08\x0c\x10\xdb\x07", + } + ], + }, + "build": "build_value", + "tf_errors": [ + { + "resource_address": "resource_address_value", + "http_response_code": 1928, + "error_description": "error_description_value", + "error": {}, + } + ], + "error_logs": "error_logs_value", + "preview_artifacts": { + "content": "content_value", + "artifacts": "artifacts_value", + }, + "logs": "logs_value", + "tf_version": "tf_version_value", + "tf_version_constraint": "tf_version_constraint_value", + "annotations": {}, + "provider_config": {"source_type": 1}, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreatePreviewRequest.meta.fields["preview"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["preview"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["preview"][field])): + del request_init["preview"][field][i][subfield] + else: + del 
request_init["preview"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourcesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourcesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resources(request) + response = client.create_preview(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListResourcesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resources_rest_interceptors(null_interceptor): +def test_create_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21224,20 +28269,21 @@ def test_list_resources_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resources" + transports.ConfigRestInterceptor, "post_create_preview" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resources_with_metadata" + transports.ConfigRestInterceptor, "post_create_preview_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resources" + transports.ConfigRestInterceptor, "pre_create_preview" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListResourcesRequest.pb(config.ListResourcesRequest()) + pb_message = config.CreatePreviewRequest.pb(config.CreatePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21248,21 +28294,19 @@ def test_list_resources_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListResourcesResponse.to_json( - config.ListResourcesResponse() - ) + 
return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ListResourcesRequest() + request = config.CreatePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourcesResponse() - post_with_metadata.return_value = config.ListResourcesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_resources( + client.create_preview( request, metadata=[ ("key", "val"), @@ -21275,14 +28319,12 @@ def test_list_resources_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_export_deployment_statefile_rest_bad_request( - request_type=config.ExportDeploymentStatefileRequest, -): +def test_get_preview_rest_bad_request(request_type=config.GetPreviewRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21298,30 +28340,42 @@ def test_export_deployment_statefile_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_deployment_statefile(request) + client.get_preview(request) @pytest.mark.parametrize( "request_type", [ - config.ExportDeploymentStatefileRequest, + config.GetPreviewRequest, dict, ], ) -def test_export_deployment_statefile_rest_call_success(request_type): +def test_get_preview_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Statefile( - signed_uri="signed_uri_value", + return_value = config.Preview( + name="name_value", + state=config.Preview.State.CREATING, + deployment="deployment_value", + preview_mode=config.Preview.PreviewMode.DEFAULT, + service_account="service_account_value", + artifacts_gcs_bucket="artifacts_gcs_bucket_value", + worker_pool="worker_pool_value", + error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, + build="build_value", + error_logs="error_logs_value", + logs="logs_value", + tf_version="tf_version_value", + tf_version_constraint="tf_version_constraint_value", ) # Wrap the value into a proper Response obj @@ -21329,20 +28383,32 @@ def test_export_deployment_statefile_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.Preview.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_deployment_statefile(request) + response = client.get_preview(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert isinstance(response, config.Preview) + assert response.name == "name_value" + assert response.state == config.Preview.State.CREATING + assert response.deployment == "deployment_value" + assert response.preview_mode == config.Preview.PreviewMode.DEFAULT + assert response.service_account == "service_account_value" + assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" + assert response.worker_pool == "worker_pool_value" + assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED + assert response.build == "build_value" + assert response.error_logs == "error_logs_value" + assert response.logs == "logs_value" + assert response.tf_version == "tf_version_value" + assert response.tf_version_constraint == "tf_version_constraint_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_deployment_statefile_rest_interceptors(null_interceptor): +def test_get_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21352,23 +28418,16 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(transports.ConfigRestInterceptor, "post_get_preview") as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_deployment_statefile" - ) as post, - mock.patch.object( - transports.ConfigRestInterceptor, - "post_export_deployment_statefile_with_metadata", + transports.ConfigRestInterceptor, "post_get_preview_with_metadata" ) as post_with_metadata, - mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_deployment_statefile" - ) as pre, + 
mock.patch.object(transports.ConfigRestInterceptor, "pre_get_preview") as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ExportDeploymentStatefileRequest.pb( - config.ExportDeploymentStatefileRequest() - ) + pb_message = config.GetPreviewRequest.pb(config.GetPreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21379,19 +28438,19 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Statefile.to_json(config.Statefile()) + return_value = config.Preview.to_json(config.Preview()) req.return_value.content = return_value - request = config.ExportDeploymentStatefileRequest() + request = config.GetPreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() - post_with_metadata.return_value = config.Statefile(), metadata + post.return_value = config.Preview() + post_with_metadata.return_value = config.Preview(), metadata - client.export_deployment_statefile( + client.get_preview( request, metadata=[ ("key", "val"), @@ -21404,16 +28463,12 @@ def test_export_deployment_statefile_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_export_revision_statefile_rest_bad_request( - request_type=config.ExportRevisionStatefileRequest, -): +def test_list_previews_rest_bad_request(request_type=config.ListPreviewsRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock 
the http request call within the method and fake a BadRequest error. @@ -21429,32 +28484,31 @@ def test_export_revision_statefile_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_revision_statefile(request) + client.list_previews(request) @pytest.mark.parametrize( "request_type", [ - config.ExportRevisionStatefileRequest, + config.ListPreviewsRequest, dict, ], ) -def test_export_revision_statefile_rest_call_success(request_type): +def test_list_previews_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "parent": "projects/sample1/locations/sample2/deployments/sample3/revisions/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Statefile( - signed_uri="signed_uri_value", + return_value = config.ListPreviewsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -21462,20 +28516,21 @@ def test_export_revision_statefile_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) + return_value = config.ListPreviewsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_revision_statefile(request) + response = client.list_previews(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + assert isinstance(response, pagers.ListPreviewsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_revision_statefile_rest_interceptors(null_interceptor): +def test_list_previews_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21486,22 +28541,17 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_revision_statefile" + transports.ConfigRestInterceptor, "post_list_previews" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, - 
"post_export_revision_statefile_with_metadata", + transports.ConfigRestInterceptor, "post_list_previews_with_metadata" ) as post_with_metadata, - mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_revision_statefile" - ) as pre, + mock.patch.object(transports.ConfigRestInterceptor, "pre_list_previews") as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ExportRevisionStatefileRequest.pb( - config.ExportRevisionStatefileRequest() - ) + pb_message = config.ListPreviewsRequest.pb(config.ListPreviewsRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21512,19 +28562,21 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Statefile.to_json(config.Statefile()) + return_value = config.ListPreviewsResponse.to_json( + config.ListPreviewsResponse() + ) req.return_value.content = return_value - request = config.ExportRevisionStatefileRequest() + request = config.ListPreviewsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() - post_with_metadata.return_value = config.Statefile(), metadata + post.return_value = config.ListPreviewsResponse() + post_with_metadata.return_value = config.ListPreviewsResponse(), metadata - client.export_revision_statefile( + client.list_previews( request, metadata=[ ("key", "val"), @@ -21537,12 +28589,12 @@ def test_export_revision_statefile_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_import_statefile_rest_bad_request(request_type=config.ImportStatefileRequest): +def test_delete_preview_rest_bad_request(request_type=config.DeletePreviewRequest): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), 
transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21558,51 +28610,45 @@ def test_import_statefile_rest_bad_request(request_type=config.ImportStatefileRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.import_statefile(request) + client.delete_preview(request) @pytest.mark.parametrize( "request_type", [ - config.ImportStatefileRequest, + config.DeletePreviewRequest, dict, ], ) -def test_import_statefile_rest_call_success(request_type): +def test_delete_preview_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Statefile( - signed_uri="signed_uri_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.Statefile.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.import_statefile(request) + response = client.delete_preview(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.Statefile) - assert response.signed_uri == "signed_uri_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_import_statefile_rest_interceptors(null_interceptor): +def test_delete_preview_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21612,20 +28658,21 @@ def test_import_statefile_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_import_statefile" + transports.ConfigRestInterceptor, "post_delete_preview" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_import_statefile_with_metadata" + transports.ConfigRestInterceptor, "post_delete_preview_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_import_statefile" + transports.ConfigRestInterceptor, 
"pre_delete_preview" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ImportStatefileRequest.pb(config.ImportStatefileRequest()) + pb_message = config.DeletePreviewRequest.pb(config.DeletePreviewRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21636,19 +28683,19 @@ def test_import_statefile_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Statefile.to_json(config.Statefile()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ImportStatefileRequest() + request = config.DeletePreviewRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Statefile() - post_with_metadata.return_value = config.Statefile(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.import_statefile( + client.delete_preview( request, metadata=[ ("key", "val"), @@ -21661,12 +28708,14 @@ def test_import_statefile_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_statefile_rest_bad_request(request_type=config.DeleteStatefileRequest): +def test_export_preview_result_rest_bad_request( + request_type=config.ExportPreviewResultRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -21682,45 +28731,48 @@ def test_delete_statefile_rest_bad_request(request_type=config.DeleteStatefileRe response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_statefile(request) + client.export_preview_result(request) @pytest.mark.parametrize( "request_type", [ - config.DeleteStatefileRequest, + config.ExportPreviewResultRequest, dict, ], ) -def test_delete_statefile_rest_call_success(request_type): +def test_export_preview_result_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = None + return_value = config.ExportPreviewResultResponse() # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - json_return_value = "" + + # Convert return value to protobuf type + return_value = config.ExportPreviewResultResponse.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_statefile(request) + response = client.export_preview_result(request) # Establish that the response is the type that we expect. 
- assert response is None + assert isinstance(response, config.ExportPreviewResultResponse) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_statefile_rest_interceptors(null_interceptor): +def test_export_preview_result_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21731,11 +28783,21 @@ def test_delete_statefile_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_statefile" + transports.ConfigRestInterceptor, "post_export_preview_result" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_export_preview_result_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_export_preview_result" ) as pre, ): pre.assert_not_called() - pb_message = config.DeleteStatefileRequest.pb(config.DeleteStatefileRequest()) + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.ExportPreviewResultRequest.pb( + config.ExportPreviewResultRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21746,15 +28808,21 @@ def test_delete_statefile_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.ExportPreviewResultResponse.to_json( + config.ExportPreviewResultResponse() + ) + req.return_value.content = return_value - request = config.DeleteStatefileRequest() + request = config.ExportPreviewResultRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata + post.return_value = 
config.ExportPreviewResultResponse() + post_with_metadata.return_value = config.ExportPreviewResultResponse(), metadata - client.delete_statefile( + client.export_preview_result( request, metadata=[ ("key", "val"), @@ -21763,14 +28831,18 @@ def test_delete_statefile_rest_interceptors(null_interceptor): ) pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() -def test_lock_deployment_rest_bad_request(request_type=config.LockDeploymentRequest): +def test_list_terraform_versions_rest_bad_request( + request_type=config.ListTerraformVersionsRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21786,45 +28858,53 @@ def test_lock_deployment_rest_bad_request(request_type=config.LockDeploymentRequ response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.lock_deployment(request) + client.list_terraform_versions(request) @pytest.mark.parametrize( "request_type", [ - config.LockDeploymentRequest, + config.ListTerraformVersionsRequest, dict, ], ) -def test_lock_deployment_rest_call_success(request_type): +def test_list_terraform_versions_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListTerraformVersionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListTerraformVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.lock_deployment(request) + response = client.list_terraform_versions(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListTerraformVersionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_lock_deployment_rest_interceptors(null_interceptor): +def test_list_terraform_versions_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21834,21 +28914,23 @@ def test_lock_deployment_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_lock_deployment" + transports.ConfigRestInterceptor, "post_list_terraform_versions" ) 
as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_lock_deployment_with_metadata" + transports.ConfigRestInterceptor, + "post_list_terraform_versions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_lock_deployment" + transports.ConfigRestInterceptor, "pre_list_terraform_versions" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.LockDeploymentRequest.pb(config.LockDeploymentRequest()) + pb_message = config.ListTerraformVersionsRequest.pb( + config.ListTerraformVersionsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21859,19 +28941,24 @@ def test_lock_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.ListTerraformVersionsResponse.to_json( + config.ListTerraformVersionsResponse() + ) req.return_value.content = return_value - request = config.LockDeploymentRequest() + request = config.ListTerraformVersionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.ListTerraformVersionsResponse() + post_with_metadata.return_value = ( + config.ListTerraformVersionsResponse(), + metadata, + ) - client.lock_deployment( + client.list_terraform_versions( request, metadata=[ ("key", "val"), @@ -21884,14 +28971,16 @@ def test_lock_deployment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_unlock_deployment_rest_bad_request( - request_type=config.UnlockDeploymentRequest, +def test_get_terraform_version_rest_bad_request( + 
request_type=config.GetTerraformVersionRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -21907,45 +28996,55 @@ def test_unlock_deployment_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.unlock_deployment(request) + client.get_terraform_version(request) @pytest.mark.parametrize( "request_type", [ - config.UnlockDeploymentRequest, + config.GetTerraformVersionRequest, dict, ], ) -def test_unlock_deployment_rest_call_success(request_type): +def test_get_terraform_version_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.TerraformVersion( + name="name_value", + state=config.TerraformVersion.State.ACTIVE, + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.TerraformVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.unlock_deployment(request) + response = client.get_terraform_version(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, config.TerraformVersion) + assert response.name == "name_value" + assert response.state == config.TerraformVersion.State.ACTIVE @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_unlock_deployment_rest_interceptors(null_interceptor): +def test_get_terraform_version_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -21955,21 +29054,22 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_unlock_deployment" + transports.ConfigRestInterceptor, "post_get_terraform_version" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_unlock_deployment_with_metadata" + transports.ConfigRestInterceptor, "post_get_terraform_version_with_metadata" ) as 
post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_unlock_deployment" + transports.ConfigRestInterceptor, "pre_get_terraform_version" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.UnlockDeploymentRequest.pb(config.UnlockDeploymentRequest()) + pb_message = config.GetTerraformVersionRequest.pb( + config.GetTerraformVersionRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -21980,19 +29080,19 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.TerraformVersion.to_json(config.TerraformVersion()) req.return_value.content = return_value - request = config.UnlockDeploymentRequest() + request = config.GetTerraformVersionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.TerraformVersion() + post_with_metadata.return_value = config.TerraformVersion(), metadata - client.unlock_deployment( + client.get_terraform_version( request, metadata=[ ("key", "val"), @@ -22005,12 +29105,14 @@ def test_unlock_deployment_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_export_lock_info_rest_bad_request(request_type=config.ExportLockInfoRequest): +def test_list_resource_changes_rest_bad_request( + request_type=config.ListResourceChangesRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + 
request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22026,34 +29128,31 @@ def test_export_lock_info_rest_bad_request(request_type=config.ExportLockInfoReq response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_lock_info(request) + client.list_resource_changes(request) @pytest.mark.parametrize( "request_type", [ - config.ExportLockInfoRequest, + config.ListResourceChangesRequest, dict, ], ) -def test_export_lock_info_rest_call_success(request_type): +def test_list_resource_changes_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/deployments/sample3"} + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.LockInfo( - lock_id=725, - operation="operation_value", - info="info_value", - who="who_value", - version="version_value", + return_value = config.ListResourceChangesResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -22061,24 +29160,21 @@ def test_export_lock_info_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.LockInfo.pb(return_value) + return_value = config.ListResourceChangesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_lock_info(request) + response = client.list_resource_changes(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.LockInfo) - assert response.lock_id == 725 - assert response.operation == "operation_value" - assert response.info == "info_value" - assert response.who == "who_value" - assert response.version == "version_value" + assert isinstance(response, pagers.ListResourceChangesPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_lock_info_rest_interceptors(null_interceptor): +def test_list_resource_changes_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22089,19 +29185,21 @@ def test_export_lock_info_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, 
mock.patch.object( - transports.ConfigRestInterceptor, "post_export_lock_info" + transports.ConfigRestInterceptor, "post_list_resource_changes" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_lock_info_with_metadata" + transports.ConfigRestInterceptor, "post_list_resource_changes_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_lock_info" + transports.ConfigRestInterceptor, "pre_list_resource_changes" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ExportLockInfoRequest.pb(config.ExportLockInfoRequest()) + pb_message = config.ListResourceChangesRequest.pb( + config.ListResourceChangesRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22112,19 +29210,21 @@ def test_export_lock_info_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.LockInfo.to_json(config.LockInfo()) + return_value = config.ListResourceChangesResponse.to_json( + config.ListResourceChangesResponse() + ) req.return_value.content = return_value - request = config.ExportLockInfoRequest() + request = config.ListResourceChangesRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.LockInfo() - post_with_metadata.return_value = config.LockInfo(), metadata + post.return_value = config.ListResourceChangesResponse() + post_with_metadata.return_value = config.ListResourceChangesResponse(), metadata - client.export_lock_info( + client.list_resource_changes( request, metadata=[ ("key", "val"), @@ -22137,12 +29237,16 @@ def test_export_lock_info_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def 
test_create_preview_rest_bad_request(request_type=config.CreatePreviewRequest): +def test_get_resource_change_rest_bad_request( + request_type=config.GetResourceChangeRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22158,162 +29262,185 @@ def test_create_preview_rest_bad_request(request_type=config.CreatePreviewReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.create_preview(request) + client.get_resource_change(request) @pytest.mark.parametrize( "request_type", [ - config.CreatePreviewRequest, + config.GetResourceChangeRequest, dict, ], ) -def test_create_preview_rest_call_success(request_type): +def test_get_resource_change_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request_init["preview"] = { - "terraform_blueprint": { - "gcs_source": "gcs_source_value", - "git_source": { - "repo": "repo_value", - "directory": "directory_value", - "ref": "ref_value", - }, - "input_values": {}, - }, - "name": "name_value", - "create_time": {"seconds": 751, "nanos": 543}, - "labels": {}, - "state": 1, - "deployment": "deployment_value", - "preview_mode": 1, - "service_account": "service_account_value", - "artifacts_gcs_bucket": "artifacts_gcs_bucket_value", - "worker_pool": "worker_pool_value", - "error_code": 1, - "error_status": { - "code": 411, - "message": "message_value", - 
"details": [ - { - "type_url": "type.googleapis.com/google.protobuf.Duration", - "value": b"\x08\x0c\x10\xdb\x07", - } - ], - }, - "build": "build_value", - "tf_errors": [ - { - "resource_address": "resource_address_value", - "http_response_code": 1928, - "error_description": "error_description_value", - "error": {}, - } - ], - "error_logs": "error_logs_value", - "preview_artifacts": { - "content": "content_value", - "artifacts": "artifacts_value", - }, - "logs": "logs_value", - "tf_version": "tf_version_value", - "tf_version_constraint": "tf_version_constraint_value", - "annotations": {}, - "provider_config": {"source_type": 1}, + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" } - # The version of a generated dependency at test runtime may differ from the version used during generation. - # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = config.ResourceChange( + name="name_value", + intent=config.ResourceChange.Intent.CREATE, + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ResourceChange.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_resource_change(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, config.ResourceChange) + assert response.name == "name_value" + assert response.intent == config.ResourceChange.Intent.CREATE + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_resource_change_rest_interceptors(null_interceptor): + transport = transports.ConfigRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), + ) + client = ConfigClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_change" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_change_with_metadata" + ) as post_with_metadata, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_resource_change" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = config.GetResourceChangeRequest.pb( + config.GetResourceChangeRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } - # Determine if the message type is proto-plus or protobuf - test_field = config.CreatePreviewRequest.meta.fields["preview"] + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = config.ResourceChange.to_json(config.ResourceChange()) + req.return_value.content = return_value - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. 
- message_fields = [] + request = config.GetResourceChangeRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = config.ResourceChange() + post_with_metadata.return_value = config.ResourceChange(), metadata - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + client.get_resource_change( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - subfields_not_in_runtime = [] +def test_list_resource_drifts_rest_bad_request( + request_type=config.ListResourceDriftsRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request = request_type(**request_init) - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["preview"].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + # Mock the http 
request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_resource_drifts(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["preview"][field])): - del request_init["preview"][field][i][subfield] - else: - del request_init["preview"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + config.ListResourceDriftsRequest, + dict, + ], +) +def test_list_resource_drifts_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListResourceDriftsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.create_preview(request) + response = client.list_resource_drifts(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListResourceDriftsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_create_preview_rest_interceptors(null_interceptor): +def test_list_resource_drifts_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22323,21 +29450,22 @@ def test_create_preview_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_create_preview" + transports.ConfigRestInterceptor, "post_list_resource_drifts" ) as post, 
mock.patch.object( - transports.ConfigRestInterceptor, "post_create_preview_with_metadata" + transports.ConfigRestInterceptor, "post_list_resource_drifts_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_create_preview" + transports.ConfigRestInterceptor, "pre_list_resource_drifts" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.CreatePreviewRequest.pb(config.CreatePreviewRequest()) + pb_message = config.ListResourceDriftsRequest.pb( + config.ListResourceDriftsRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22348,19 +29476,21 @@ def test_create_preview_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.ListResourceDriftsResponse.to_json( + config.ListResourceDriftsResponse() + ) req.return_value.content = return_value - request = config.CreatePreviewRequest() + request = config.ListResourceDriftsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.ListResourceDriftsResponse() + post_with_metadata.return_value = config.ListResourceDriftsResponse(), metadata - client.create_preview( + client.list_resource_drifts( request, metadata=[ ("key", "val"), @@ -22373,12 +29503,16 @@ def test_create_preview_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_preview_rest_bad_request(request_type=config.GetPreviewRequest): +def test_get_resource_drift_rest_bad_request( + request_type=config.GetResourceDriftRequest, +): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22394,42 +29528,32 @@ def test_get_preview_rest_bad_request(request_type=config.GetPreviewRequest): response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_preview(request) + client.get_resource_drift(request) @pytest.mark.parametrize( "request_type", [ - config.GetPreviewRequest, + config.GetResourceDriftRequest, dict, ], ) -def test_get_preview_rest_call_success(request_type): +def test_get_resource_drift_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.Preview( + return_value = config.ResourceDrift( name="name_value", - state=config.Preview.State.CREATING, - deployment="deployment_value", - preview_mode=config.Preview.PreviewMode.DEFAULT, - service_account="service_account_value", - artifacts_gcs_bucket="artifacts_gcs_bucket_value", - worker_pool="worker_pool_value", - error_code=config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED, - build="build_value", - error_logs="error_logs_value", - logs="logs_value", - tf_version="tf_version_value", - tf_version_constraint="tf_version_constraint_value", ) # Wrap the value into a proper Response obj @@ -22437,32 +29561,20 @@ def test_get_preview_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.Preview.pb(return_value) + return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_preview(request) + response = client.get_resource_drift(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.Preview) + assert isinstance(response, config.ResourceDrift) assert response.name == "name_value" - assert response.state == config.Preview.State.CREATING - assert response.deployment == "deployment_value" - assert response.preview_mode == config.Preview.PreviewMode.DEFAULT - assert response.service_account == "service_account_value" - assert response.artifacts_gcs_bucket == "artifacts_gcs_bucket_value" - assert response.worker_pool == "worker_pool_value" - assert response.error_code == config.Preview.ErrorCode.CLOUD_BUILD_PERMISSION_DENIED - assert response.build == "build_value" - assert response.error_logs == "error_logs_value" - assert response.logs == "logs_value" - assert response.tf_version == "tf_version_value" - assert response.tf_version_constraint == "tf_version_constraint_value" @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_preview_rest_interceptors(null_interceptor): +def test_get_resource_drift_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22472,16 +29584,20 @@ def test_get_preview_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(transports.ConfigRestInterceptor, "post_get_preview") as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_preview_with_metadata" + transports.ConfigRestInterceptor, "post_get_resource_drift" + ) as post, + mock.patch.object( + transports.ConfigRestInterceptor, "post_get_resource_drift_with_metadata" ) as post_with_metadata, - mock.patch.object(transports.ConfigRestInterceptor, "pre_get_preview") as pre, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_resource_drift" + ) as pre, ): pre.assert_not_called() 
post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetPreviewRequest.pb(config.GetPreviewRequest()) + pb_message = config.GetResourceDriftRequest.pb(config.GetResourceDriftRequest()) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22492,19 +29608,19 @@ def test_get_preview_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.Preview.to_json(config.Preview()) + return_value = config.ResourceDrift.to_json(config.ResourceDrift()) req.return_value.content = return_value - request = config.GetPreviewRequest() + request = config.GetResourceDriftRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.Preview() - post_with_metadata.return_value = config.Preview(), metadata + post.return_value = config.ResourceDrift() + post_with_metadata.return_value = config.ResourceDrift(), metadata - client.get_preview( + client.get_resource_drift( request, metadata=[ ("key", "val"), @@ -22517,12 +29633,14 @@ def test_get_preview_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_previews_rest_bad_request(request_type=config.ListPreviewsRequest): +def test_get_auto_migration_config_rest_bad_request( + request_type=config.GetAutoMigrationConfigRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -22538,31 +29656,31 @@ def test_list_previews_rest_bad_request(request_type=config.ListPreviewsRequest) response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_previews(request) + client.get_auto_migration_config(request) @pytest.mark.parametrize( "request_type", [ - config.ListPreviewsRequest, + config.GetAutoMigrationConfigRequest, dict, ], ) -def test_list_previews_rest_call_success(request_type): +def test_get_auto_migration_config_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListPreviewsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], + return_value = config.AutoMigrationConfig( + name="name_value", + auto_migration_enabled=True, ) # Wrap the value into a proper Response obj @@ -22570,21 +29688,21 @@ def test_list_previews_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ListPreviewsResponse.pb(return_value) + return_value = config.AutoMigrationConfig.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_previews(request) + response = client.get_auto_migration_config(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListPreviewsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + assert isinstance(response, config.AutoMigrationConfig) + assert response.name == "name_value" + assert response.auto_migration_enabled is True @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_previews_rest_interceptors(null_interceptor): +def test_get_auto_migration_config_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22595,17 +29713,22 @@ def test_list_previews_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_previews" + transports.ConfigRestInterceptor, "post_get_auto_migration_config" ) as post, 
mock.patch.object( - transports.ConfigRestInterceptor, "post_list_previews_with_metadata" + transports.ConfigRestInterceptor, + "post_get_auto_migration_config_with_metadata", ) as post_with_metadata, - mock.patch.object(transports.ConfigRestInterceptor, "pre_list_previews") as pre, + mock.patch.object( + transports.ConfigRestInterceptor, "pre_get_auto_migration_config" + ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListPreviewsRequest.pb(config.ListPreviewsRequest()) + pb_message = config.GetAutoMigrationConfigRequest.pb( + config.GetAutoMigrationConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22616,21 +29739,19 @@ def test_list_previews_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListPreviewsResponse.to_json( - config.ListPreviewsResponse() - ) + return_value = config.AutoMigrationConfig.to_json(config.AutoMigrationConfig()) req.return_value.content = return_value - request = config.ListPreviewsRequest() + request = config.GetAutoMigrationConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListPreviewsResponse() - post_with_metadata.return_value = config.ListPreviewsResponse(), metadata + post.return_value = config.AutoMigrationConfig() + post_with_metadata.return_value = config.AutoMigrationConfig(), metadata - client.list_previews( + client.get_auto_migration_config( request, metadata=[ ("key", "val"), @@ -22643,12 +29764,18 @@ def test_list_previews_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_delete_preview_rest_bad_request(request_type=config.DeletePreviewRequest): +def test_update_auto_migration_config_rest_bad_request( + 
request_type=config.UpdateAutoMigrationConfigRequest, +): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22664,23 +29791,103 @@ def test_delete_preview_rest_bad_request(request_type=config.DeletePreviewReques response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.delete_preview(request) + client.update_auto_migration_config(request) @pytest.mark.parametrize( "request_type", [ - config.DeletePreviewRequest, + config.UpdateAutoMigrationConfigRequest, dict, ], ) -def test_delete_preview_rest_call_success(request_type): +def test_update_auto_migration_config_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "auto_migration_config": { + "name": "projects/sample1/locations/sample2/autoMigrationConfig" + } + } + request_init["auto_migration_config"] = { + "name": "projects/sample1/locations/sample2/autoMigrationConfig", + "update_time": {"seconds": 751, "nanos": 543}, + "auto_migration_enabled": True, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateAutoMigrationConfigRequest.meta.fields[ + "auto_migration_config" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init[ + "auto_migration_config" + ].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in 
the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["auto_migration_config"][field])): + del request_init["auto_migration_config"][field][i][subfield] + else: + del request_init["auto_migration_config"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. @@ -22695,14 +29902,14 @@ def test_delete_preview_rest_call_success(request_type): response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.delete_preview(request) + response = client.update_auto_migration_config(request) # Establish that the response is the type that we expect. 
json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_delete_preview_rest_interceptors(null_interceptor): +def test_update_auto_migration_config_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22714,19 +29921,22 @@ def test_delete_preview_rest_interceptors(null_interceptor): mock.patch.object(path_template, "transcode") as transcode, mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_preview" + transports.ConfigRestInterceptor, "post_update_auto_migration_config" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_delete_preview_with_metadata" + transports.ConfigRestInterceptor, + "post_update_auto_migration_config_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_delete_preview" + transports.ConfigRestInterceptor, "pre_update_auto_migration_config" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.DeletePreviewRequest.pb(config.DeletePreviewRequest()) + pb_message = config.UpdateAutoMigrationConfigRequest.pb( + config.UpdateAutoMigrationConfigRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -22740,7 +29950,7 @@ def test_delete_preview_rest_interceptors(null_interceptor): return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.DeletePreviewRequest() + request = config.UpdateAutoMigrationConfigRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), @@ -22749,7 +29959,7 @@ def test_delete_preview_rest_interceptors(null_interceptor): post.return_value = operations_pb2.Operation() 
post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.delete_preview( + client.update_auto_migration_config( request, metadata=[ ("key", "val"), @@ -22762,14 +29972,16 @@ def test_delete_preview_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_export_preview_result_rest_bad_request( - request_type=config.ExportPreviewResultRequest, +def test_get_deployment_group_rest_bad_request( + request_type=config.GetDeploymentGroupRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -22785,48 +29997,67 @@ def test_export_preview_result_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.export_preview_result(request) + client.get_deployment_group(request) @pytest.mark.parametrize( "request_type", [ - config.ExportPreviewResultRequest, + config.GetDeploymentGroupRequest, dict, ], ) -def test_export_preview_result_rest_call_success(request_type): +def test_get_deployment_group_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. 
with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ExportPreviewResultResponse() + return_value = config.DeploymentGroup( + name="name_value", + state=config.DeploymentGroup.State.CREATING, + state_description="state_description_value", + provisioning_state=config.DeploymentGroup.ProvisioningState.PROVISIONING, + provisioning_state_description="provisioning_state_description_value", + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ExportPreviewResultResponse.pb(return_value) + return_value = config.DeploymentGroup.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.export_preview_result(request) + response = client.get_deployment_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.ExportPreviewResultResponse) + assert isinstance(response, config.DeploymentGroup) + assert response.name == "name_value" + assert response.state == config.DeploymentGroup.State.CREATING + assert response.state_description == "state_description_value" + assert ( + response.provisioning_state + == config.DeploymentGroup.ProvisioningState.PROVISIONING + ) + assert ( + response.provisioning_state_description + == "provisioning_state_description_value" + ) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_export_preview_result_rest_interceptors(null_interceptor): +def test_get_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22837,20 +30068,20 @@ def test_export_preview_result_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_preview_result" + transports.ConfigRestInterceptor, "post_get_deployment_group" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_export_preview_result_with_metadata" + transports.ConfigRestInterceptor, "post_get_deployment_group_with_metadata" ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_export_preview_result" + transports.ConfigRestInterceptor, "pre_get_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ExportPreviewResultRequest.pb( - config.ExportPreviewResultRequest() + pb_message = config.GetDeploymentGroupRequest.pb( + config.GetDeploymentGroupRequest() ) transcode.return_value = { "method": "post", @@ -22862,21 +30093,19 @@ def 
test_export_preview_result_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ExportPreviewResultResponse.to_json( - config.ExportPreviewResultResponse() - ) + return_value = config.DeploymentGroup.to_json(config.DeploymentGroup()) req.return_value.content = return_value - request = config.ExportPreviewResultRequest() + request = config.GetDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ExportPreviewResultResponse() - post_with_metadata.return_value = config.ExportPreviewResultResponse(), metadata + post.return_value = config.DeploymentGroup() + post_with_metadata.return_value = config.DeploymentGroup(), metadata - client.export_preview_result( + client.get_deployment_group( request, metadata=[ ("key", "val"), @@ -22889,76 +30118,163 @@ def test_export_preview_result_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_terraform_versions_rest_bad_request( - request_type=config.ListTerraformVersionsRequest, -): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} - request = request_type(**request_init) +def test_create_deployment_group_rest_bad_request( + request_type=config.CreateDeploymentGroupRequest, +): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_deployment_group(request) + + +@pytest.mark.parametrize( + "request_type", + [ + config.CreateDeploymentGroupRequest, + dict, + ], +) +def test_create_deployment_group_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["deployment_group"] = { + "name": "name_value", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "annotations": {}, + "state": 1, + "state_description": "state_description_value", + "deployment_units": [ + { + "id": "id_value", + "deployment": "deployment_value", + "dependencies": ["dependencies_value1", "dependencies_value2"], + } + ], + "provisioning_state": 1, + "provisioning_state_description": "provisioning_state_description_value", + "provisioning_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.CreateDeploymentGroupRequest.meta.fields["deployment_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] - # Mock the http request call within the method and fake a BadRequest error. 
- with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_terraform_versions(request) + subfields_not_in_runtime = [] + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value -@pytest.mark.parametrize( - "request_type", - [ - config.ListTerraformVersionsRequest, - dict, - ], -) -def test_list_terraform_versions_rest_call_success(request_type): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) - # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2"} + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = 
subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment_group"][field])): + del request_init["deployment_group"][field][i][subfield] + else: + del request_init["deployment_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListTerraformVersionsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListTerraformVersionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_terraform_versions(request) + response = client.create_deployment_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListTerraformVersionsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_terraform_versions_rest_interceptors(null_interceptor): +def test_create_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -22968,22 +30284,23 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_list_terraform_versions" + transports.ConfigRestInterceptor, "post_create_deployment_group" ) as post, mock.patch.object( transports.ConfigRestInterceptor, - "post_list_terraform_versions_with_metadata", + "post_create_deployment_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_terraform_versions" + transports.ConfigRestInterceptor, "pre_create_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListTerraformVersionsRequest.pb( - config.ListTerraformVersionsRequest() + pb_message = config.CreateDeploymentGroupRequest.pb( + config.CreateDeploymentGroupRequest() ) transcode.return_value = { "method": "post", @@ -22995,24 +30312,19 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - 
return_value = config.ListTerraformVersionsResponse.to_json( - config.ListTerraformVersionsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ListTerraformVersionsRequest() + request = config.CreateDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListTerraformVersionsResponse() - post_with_metadata.return_value = ( - config.ListTerraformVersionsResponse(), - metadata, - ) + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_terraform_versions( + client.create_deployment_group( request, metadata=[ ("key", "val"), @@ -23025,15 +30337,17 @@ def test_list_terraform_versions_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_terraform_version_rest_bad_request( - request_type=config.GetTerraformVersionRequest, +def test_update_deployment_group_rest_bad_request( + request_type=config.UpdateDeploymentGroupRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + "deployment_group": { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } } request = request_type(**request_init) @@ -23050,55 +30364,144 @@ def test_get_terraform_version_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_terraform_version(request) + client.update_deployment_group(request) @pytest.mark.parametrize( "request_type", [ - config.GetTerraformVersionRequest, + config.UpdateDeploymentGroupRequest, dict, ], ) -def 
test_get_terraform_version_rest_call_success(request_type): +def test_update_deployment_group_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/terraformVersions/sample3" + "deployment_group": { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } + } + request_init["deployment_group"] = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3", + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "labels": {}, + "annotations": {}, + "state": 1, + "state_description": "state_description_value", + "deployment_units": [ + { + "id": "id_value", + "deployment": "deployment_value", + "dependencies": ["dependencies_value1", "dependencies_value2"], + } + ], + "provisioning_state": 1, + "provisioning_state_description": "provisioning_state_description_value", + "provisioning_error": { + "code": 411, + "message": "message_value", + "details": [ + { + "type_url": "type.googleapis.com/google.protobuf.Duration", + "value": b"\x08\x0c\x10\xdb\x07", + } + ], + }, } + # The version of a generated dependency at test runtime may differ from the version used during generation. + # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = config.UpdateDeploymentGroupRequest.meta.fields["deployment_group"] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. 
+ message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["deployment_group"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["deployment_group"][field])): + del request_init["deployment_group"][field][i][subfield] + 
else: + del request_init["deployment_group"][field][subfield] request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.TerraformVersion( - name="name_value", - state=config.TerraformVersion.State.ACTIVE, - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.TerraformVersion.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_terraform_version(request) + response = client.update_deployment_group(request) # Establish that the response is the type that we expect. 
- assert isinstance(response, config.TerraformVersion) - assert response.name == "name_value" - assert response.state == config.TerraformVersion.State.ACTIVE + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_terraform_version_rest_interceptors(null_interceptor): +def test_update_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23108,21 +30511,23 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_get_terraform_version" + transports.ConfigRestInterceptor, "post_update_deployment_group" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_terraform_version_with_metadata" + transports.ConfigRestInterceptor, + "post_update_deployment_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_terraform_version" + transports.ConfigRestInterceptor, "pre_update_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetTerraformVersionRequest.pb( - config.GetTerraformVersionRequest() + pb_message = config.UpdateDeploymentGroupRequest.pb( + config.UpdateDeploymentGroupRequest() ) transcode.return_value = { "method": "post", @@ -23134,19 +30539,19 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = 
config.TerraformVersion.to_json(config.TerraformVersion()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.GetTerraformVersionRequest() + request = config.UpdateDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.TerraformVersion() - post_with_metadata.return_value = config.TerraformVersion(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_terraform_version( + client.update_deployment_group( request, metadata=[ ("key", "val"), @@ -23159,14 +30564,16 @@ def test_get_terraform_version_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_resource_changes_rest_bad_request( - request_type=config.ListResourceChangesRequest, +def test_delete_deployment_group_rest_bad_request( + request_type=config.DeleteDeploymentGroupRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23182,53 +30589,47 @@ def test_list_resource_changes_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_changes(request) + client.delete_deployment_group(request) @pytest.mark.parametrize( "request_type", [ - config.ListResourceChangesRequest, + config.DeleteDeploymentGroupRequest, dict, ], ) -def test_list_resource_changes_rest_call_success(request_type): +def test_delete_deployment_group_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. - return_value = config.ListResourceChangesResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourceChangesResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_changes(request) - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListResourceChangesPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + response = client.delete_deployment_group(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resource_changes_rest_interceptors(null_interceptor): +def test_delete_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23238,21 +30639,23 @@ def test_list_resource_changes_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_changes" + transports.ConfigRestInterceptor, "post_delete_deployment_group" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_changes_with_metadata" + transports.ConfigRestInterceptor, + "post_delete_deployment_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resource_changes" + transports.ConfigRestInterceptor, "pre_delete_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListResourceChangesRequest.pb( - config.ListResourceChangesRequest() + pb_message = config.DeleteDeploymentGroupRequest.pb( + config.DeleteDeploymentGroupRequest() ) transcode.return_value = { "method": "post", @@ -23264,21 +30667,19 @@ def test_list_resource_changes_rest_interceptors(null_interceptor): req.return_value = 
mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListResourceChangesResponse.to_json( - config.ListResourceChangesResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ListResourceChangesRequest() + request = config.DeleteDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourceChangesResponse() - post_with_metadata.return_value = config.ListResourceChangesResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_resource_changes( + client.delete_deployment_group( request, metadata=[ ("key", "val"), @@ -23291,16 +30692,14 @@ def test_list_resource_changes_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_resource_change_rest_bad_request( - request_type=config.GetResourceChangeRequest, +def test_list_deployment_groups_rest_bad_request( + request_type=config.ListDeploymentGroupsRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. 
@@ -23316,33 +30715,31 @@ def test_get_resource_change_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_change(request) + client.list_deployment_groups(request) @pytest.mark.parametrize( "request_type", [ - config.GetResourceChangeRequest, + config.ListDeploymentGroupsRequest, dict, ], ) -def test_get_resource_change_rest_call_success(request_type): +def test_list_deployment_groups_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceChanges/sample4" - } + request_init = {"parent": "projects/sample1/locations/sample2"} request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ResourceChange( - name="name_value", - intent=config.ResourceChange.Intent.CREATE, + return_value = config.ListDeploymentGroupsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) # Wrap the value into a proper Response obj @@ -23350,21 +30747,21 @@ def test_get_resource_change_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.ResourceChange.pb(return_value) + return_value = config.ListDeploymentGroupsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_change(request) + response = client.list_deployment_groups(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.ResourceChange) - assert response.name == "name_value" - assert response.intent == config.ResourceChange.Intent.CREATE + assert isinstance(response, pagers.ListDeploymentGroupsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_change_rest_interceptors(null_interceptor): +def test_list_deployment_groups_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23375,20 +30772,21 @@ def test_get_resource_change_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_change" + 
transports.ConfigRestInterceptor, "post_list_deployment_groups" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_change_with_metadata" + transports.ConfigRestInterceptor, + "post_list_deployment_groups_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_resource_change" + transports.ConfigRestInterceptor, "pre_list_deployment_groups" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetResourceChangeRequest.pb( - config.GetResourceChangeRequest() + pb_message = config.ListDeploymentGroupsRequest.pb( + config.ListDeploymentGroupsRequest() ) transcode.return_value = { "method": "post", @@ -23400,19 +30798,24 @@ def test_get_resource_change_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ResourceChange.to_json(config.ResourceChange()) + return_value = config.ListDeploymentGroupsResponse.to_json( + config.ListDeploymentGroupsResponse() + ) req.return_value.content = return_value - request = config.GetResourceChangeRequest() + request = config.ListDeploymentGroupsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ResourceChange() - post_with_metadata.return_value = config.ResourceChange(), metadata + post.return_value = config.ListDeploymentGroupsResponse() + post_with_metadata.return_value = ( + config.ListDeploymentGroupsResponse(), + metadata, + ) - client.get_resource_change( + client.list_deployment_groups( request, metadata=[ ("key", "val"), @@ -23425,14 +30828,16 @@ def test_get_resource_change_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_list_resource_drifts_rest_bad_request( - request_type=config.ListResourceDriftsRequest, +def 
test_provision_deployment_group_rest_bad_request( + request_type=config.ProvisionDeploymentGroupRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23448,53 +30853,47 @@ def test_list_resource_drifts_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.list_resource_drifts(request) + client.provision_deployment_group(request) @pytest.mark.parametrize( "request_type", [ - config.ListResourceDriftsRequest, + config.ProvisionDeploymentGroupRequest, dict, ], ) -def test_list_resource_drifts_rest_call_success(request_type): +def test_provision_deployment_group_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"parent": "projects/sample1/locations/sample2/previews/sample3"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ListResourceDriftsResponse( - next_page_token="next_page_token_value", - unreachable=["unreachable_value"], - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ListResourceDriftsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.list_resource_drifts(request) + response = client.provision_deployment_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListResourceDriftsPager) - assert response.next_page_token == "next_page_token_value" - assert response.unreachable == ["unreachable_value"] + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_list_resource_drifts_rest_interceptors(null_interceptor): +def test_provision_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23504,21 +30903,23 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_drifts" + transports.ConfigRestInterceptor, "post_provision_deployment_group" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_list_resource_drifts_with_metadata" + 
transports.ConfigRestInterceptor, + "post_provision_deployment_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_list_resource_drifts" + transports.ConfigRestInterceptor, "pre_provision_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.ListResourceDriftsRequest.pb( - config.ListResourceDriftsRequest() + pb_message = config.ProvisionDeploymentGroupRequest.pb( + config.ProvisionDeploymentGroupRequest() ) transcode.return_value = { "method": "post", @@ -23530,21 +30931,19 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ListResourceDriftsResponse.to_json( - config.ListResourceDriftsResponse() - ) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.ListResourceDriftsRequest() + request = config.ProvisionDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ListResourceDriftsResponse() - post_with_metadata.return_value = config.ListResourceDriftsResponse(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.list_resource_drifts( + client.provision_deployment_group( request, metadata=[ ("key", "val"), @@ -23557,15 +30956,15 @@ def test_list_resource_drifts_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_resource_drift_rest_bad_request( - request_type=config.GetResourceDriftRequest, +def test_deprovision_deployment_group_rest_bad_request( + request_type=config.DeprovisionDeploymentGroupRequest, ): client = ConfigClient( 
credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" } request = request_type(**request_init) @@ -23582,53 +30981,47 @@ def test_get_resource_drift_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_resource_drift(request) + client.deprovision_deployment_group(request) @pytest.mark.parametrize( "request_type", [ - config.GetResourceDriftRequest, + config.DeprovisionDeploymentGroupRequest, dict, ], ) -def test_get_resource_drift_rest_call_success(request_type): +def test_deprovision_deployment_group_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "name": "projects/sample1/locations/sample2/previews/sample3/resourceDrifts/sample4" + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.ResourceDrift( - name="name_value", - ) + return_value = operations_pb2.Operation(name="operations/spam") # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 - - # Convert return value to protobuf type - return_value = config.ResourceDrift.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_resource_drift(request) + response = client.deprovision_deployment_group(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.ResourceDrift) - assert response.name == "name_value" + json_return_value = json_format.MessageToJson(return_value) @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_resource_drift_rest_interceptors(null_interceptor): +def test_deprovision_deployment_group_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23638,20 +31031,24 @@ def test_get_resource_drift_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_drift" + transports.ConfigRestInterceptor, "post_deprovision_deployment_group" ) as post, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_resource_drift_with_metadata" + transports.ConfigRestInterceptor, + "post_deprovision_deployment_group_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, 
"pre_get_resource_drift" + transports.ConfigRestInterceptor, "pre_deprovision_deployment_group" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetResourceDriftRequest.pb(config.GetResourceDriftRequest()) + pb_message = config.DeprovisionDeploymentGroupRequest.pb( + config.DeprovisionDeploymentGroupRequest() + ) transcode.return_value = { "method": "post", "uri": "my_uri", @@ -23662,19 +31059,19 @@ def test_get_resource_drift_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.ResourceDrift.to_json(config.ResourceDrift()) + return_value = json_format.MessageToJson(operations_pb2.Operation()) req.return_value.content = return_value - request = config.GetResourceDriftRequest() + request = config.DeprovisionDeploymentGroupRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.ResourceDrift() - post_with_metadata.return_value = config.ResourceDrift(), metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata - client.get_resource_drift( + client.deprovision_deployment_group( request, metadata=[ ("key", "val"), @@ -23687,14 +31084,16 @@ def test_get_resource_drift_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_get_auto_migration_config_rest_bad_request( - request_type=config.GetAutoMigrationConfigRequest, +def test_get_deployment_group_revision_rest_bad_request( + request_type=config.GetDeploymentGroupRevisionRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} + request_init = { + 
"name": "projects/sample1/locations/sample2/deploymentGroups/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. @@ -23710,31 +31109,33 @@ def test_get_auto_migration_config_rest_bad_request( response_value.request = mock.Mock() req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.get_auto_migration_config(request) + client.get_deployment_group_revision(request) @pytest.mark.parametrize( "request_type", [ - config.GetAutoMigrationConfigRequest, + config.GetDeploymentGroupRevisionRequest, dict, ], ) -def test_get_auto_migration_config_rest_call_success(request_type): +def test_get_deployment_group_revision_rest_call_success(request_type): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding - request_init = {"name": "projects/sample1/locations/sample2/autoMigrationConfig"} + request_init = { + "name": "projects/sample1/locations/sample2/deploymentGroups/sample3/revisions/sample4" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = config.AutoMigrationConfig( + return_value = config.DeploymentGroupRevision( name="name_value", - auto_migration_enabled=True, + alternative_ids=["alternative_ids_value"], ) # Wrap the value into a proper Response obj @@ -23742,21 +31143,21 @@ def test_get_auto_migration_config_rest_call_success(request_type): response_value.status_code = 200 # Convert return value to protobuf type - return_value = config.AutoMigrationConfig.pb(return_value) + return_value = config.DeploymentGroupRevision.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.get_auto_migration_config(request) + response = client.get_deployment_group_revision(request) # Establish that the response is the type that we expect. - assert isinstance(response, config.AutoMigrationConfig) + assert isinstance(response, config.DeploymentGroupRevision) assert response.name == "name_value" - assert response.auto_migration_enabled is True + assert response.alternative_ids == ["alternative_ids_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_get_auto_migration_config_rest_interceptors(null_interceptor): +def test_get_deployment_group_revision_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23767,21 +31168,21 @@ def test_get_auto_migration_config_rest_interceptors(null_interceptor): mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, mock.patch.object( - transports.ConfigRestInterceptor, "post_get_auto_migration_config" + transports.ConfigRestInterceptor, "post_get_deployment_group_revision" ) as post, mock.patch.object( 
transports.ConfigRestInterceptor, - "post_get_auto_migration_config_with_metadata", + "post_get_deployment_group_revision_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_get_auto_migration_config" + transports.ConfigRestInterceptor, "pre_get_deployment_group_revision" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.GetAutoMigrationConfigRequest.pb( - config.GetAutoMigrationConfigRequest() + pb_message = config.GetDeploymentGroupRevisionRequest.pb( + config.GetDeploymentGroupRevisionRequest() ) transcode.return_value = { "method": "post", @@ -23793,19 +31194,21 @@ def test_get_auto_migration_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = config.AutoMigrationConfig.to_json(config.AutoMigrationConfig()) + return_value = config.DeploymentGroupRevision.to_json( + config.DeploymentGroupRevision() + ) req.return_value.content = return_value - request = config.GetAutoMigrationConfigRequest() + request = config.GetDeploymentGroupRevisionRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = config.AutoMigrationConfig() - post_with_metadata.return_value = config.AutoMigrationConfig(), metadata + post.return_value = config.DeploymentGroupRevision() + post_with_metadata.return_value = config.DeploymentGroupRevision(), metadata - client.get_auto_migration_config( + client.get_deployment_group_revision( request, metadata=[ ("key", "val"), @@ -23818,152 +31221,80 @@ def test_get_auto_migration_config_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() -def test_update_auto_migration_config_rest_bad_request( - request_type=config.UpdateAutoMigrationConfigRequest, +def 
test_list_deployment_group_revisions_rest_bad_request( + request_type=config.ListDeploymentGroupRevisionsRequest, ): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest" ) # send a request that will satisfy transcoding request_init = { - "auto_migration_config": { - "name": "projects/sample1/locations/sample2/autoMigrationConfig" - } + "parent": "projects/sample1/locations/sample2/deploymentGroups/sample3" } request = request_type(**request_init) # Mock the http request call within the method and fake a BadRequest error. with ( - mock.patch.object(Session, "request") as req, - pytest.raises(core_exceptions.BadRequest), - ): - # Wrap the value into a proper Response obj - response_value = mock.Mock() - json_return_value = "" - response_value.json = mock.Mock(return_value={}) - response_value.status_code = 400 - response_value.request = mock.Mock() - req.return_value = response_value - req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - client.update_auto_migration_config(request) - - -@pytest.mark.parametrize( - "request_type", - [ - config.UpdateAutoMigrationConfigRequest, - dict, - ], -) -def test_update_auto_migration_config_rest_call_success(request_type): - client = ConfigClient( - credentials=ga_credentials.AnonymousCredentials(), transport="rest" - ) - - # send a request that will satisfy transcoding - request_init = { - "auto_migration_config": { - "name": "projects/sample1/locations/sample2/autoMigrationConfig" - } - } - request_init["auto_migration_config"] = { - "name": "projects/sample1/locations/sample2/autoMigrationConfig", - "update_time": {"seconds": 751, "nanos": 543}, - "auto_migration_enabled": True, - } - # The version of a generated dependency at test runtime may differ from the version used during generation. 
- # Delete any fields which are not present in the current runtime dependency - # See https://github.com/googleapis/gapic-generator-python/issues/1748 - - # Determine if the message type is proto-plus or protobuf - test_field = config.UpdateAutoMigrationConfigRequest.meta.fields[ - "auto_migration_config" - ] - - def get_message_fields(field): - # Given a field which is a message (composite type), return a list with - # all the fields of the message. - # If the field is not a composite type, return an empty list. - message_fields = [] - - if hasattr(field, "message") and field.message: - is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") - - if is_field_type_proto_plus_type: - message_fields = field.message.meta.fields.values() - # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER - message_fields = field.message.DESCRIPTOR.fields - return message_fields - - runtime_nested_fields = [ - (field.name, nested_field.name) - for field in get_message_fields(test_field) - for nested_field in get_message_fields(field) - ] - - subfields_not_in_runtime = [] - - # For each item in the sample request, create a list of sub fields which are not present at runtime - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init[ - "auto_migration_config" - ].items(): # pragma: NO COVER - result = None - is_repeated = False - # For repeated fields - if isinstance(value, list) and len(value): - is_repeated = True - result = value[0] - # For fields where the type is another message - if isinstance(value, dict): - result = value + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = 
mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_deployment_group_revisions(request) - if result and hasattr(result, "keys"): - for subfield in result.keys(): - if (field, subfield) not in runtime_nested_fields: - subfields_not_in_runtime.append( - { - "field": field, - "subfield": subfield, - "is_repeated": is_repeated, - } - ) - # Remove fields from the sample request which are not present in the runtime version of the dependency - # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER - field = subfield_to_delete.get("field") - field_repeated = subfield_to_delete.get("is_repeated") - subfield = subfield_to_delete.get("subfield") - if subfield: - if field_repeated: - for i in range(0, len(request_init["auto_migration_config"][field])): - del request_init["auto_migration_config"][field][i][subfield] - else: - del request_init["auto_migration_config"][field][subfield] +@pytest.mark.parametrize( + "request_type", + [ + config.ListDeploymentGroupRevisionsRequest, + dict, + ], +) +def test_list_deployment_group_revisions_rest_call_success(request_type): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "parent": "projects/sample1/locations/sample2/deploymentGroups/sample3" + } request = request_type(**request_init) # Mock the http request call within the method and fake a response. with mock.patch.object(type(client.transport._session), "request") as req: # Designate an appropriate value for the returned response. 
- return_value = operations_pb2.Operation(name="operations/spam") + return_value = config.ListDeploymentGroupRevisionsResponse( + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], + ) # Wrap the value into a proper Response obj response_value = mock.Mock() response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = config.ListDeploymentGroupRevisionsResponse.pb(return_value) json_return_value = json_format.MessageToJson(return_value) response_value.content = json_return_value.encode("UTF-8") req.return_value = response_value req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - response = client.update_auto_migration_config(request) + response = client.list_deployment_group_revisions(request) # Establish that the response is the type that we expect. - json_return_value = json_format.MessageToJson(return_value) + assert isinstance(response, pagers.ListDeploymentGroupRevisionsPager) + assert response.next_page_token == "next_page_token_value" + assert response.unreachable == ["unreachable_value"] @pytest.mark.parametrize("null_interceptor", [True, False]) -def test_update_auto_migration_config_rest_interceptors(null_interceptor): +def test_list_deployment_group_revisions_rest_interceptors(null_interceptor): transport = transports.ConfigRestTransport( credentials=ga_credentials.AnonymousCredentials(), interceptor=None if null_interceptor else transports.ConfigRestInterceptor(), @@ -23973,23 +31304,22 @@ def test_update_auto_migration_config_rest_interceptors(null_interceptor): with ( mock.patch.object(type(client.transport._session), "request") as req, mock.patch.object(path_template, "transcode") as transcode, - mock.patch.object(operation.Operation, "_set_result_from_operation"), mock.patch.object( - transports.ConfigRestInterceptor, "post_update_auto_migration_config" + transports.ConfigRestInterceptor, "post_list_deployment_group_revisions" ) as post, mock.patch.object( 
transports.ConfigRestInterceptor, - "post_update_auto_migration_config_with_metadata", + "post_list_deployment_group_revisions_with_metadata", ) as post_with_metadata, mock.patch.object( - transports.ConfigRestInterceptor, "pre_update_auto_migration_config" + transports.ConfigRestInterceptor, "pre_list_deployment_group_revisions" ) as pre, ): pre.assert_not_called() post.assert_not_called() post_with_metadata.assert_not_called() - pb_message = config.UpdateAutoMigrationConfigRequest.pb( - config.UpdateAutoMigrationConfigRequest() + pb_message = config.ListDeploymentGroupRevisionsRequest.pb( + config.ListDeploymentGroupRevisionsRequest() ) transcode.return_value = { "method": "post", @@ -24001,19 +31331,24 @@ def test_update_auto_migration_config_rest_interceptors(null_interceptor): req.return_value = mock.Mock() req.return_value.status_code = 200 req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} - return_value = json_format.MessageToJson(operations_pb2.Operation()) + return_value = config.ListDeploymentGroupRevisionsResponse.to_json( + config.ListDeploymentGroupRevisionsResponse() + ) req.return_value.content = return_value - request = config.UpdateAutoMigrationConfigRequest() + request = config.ListDeploymentGroupRevisionsRequest() metadata = [ ("key", "val"), ("cephalopod", "squid"), ] pre.return_value = request, metadata - post.return_value = operations_pb2.Operation() - post_with_metadata.return_value = operations_pb2.Operation(), metadata + post.return_value = config.ListDeploymentGroupRevisionsResponse() + post_with_metadata.return_value = ( + config.ListDeploymentGroupRevisionsResponse(), + metadata, + ) - client.update_auto_migration_config( + client.list_deployment_group_revisions( request, metadata=[ ("key", "val"), @@ -24763,138 +32098,326 @@ def test_get_resource_empty_call_rest(): # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceRequest() + request_msg = config.GetResourceRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_resources_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.list_resources), "__call__") as call: + client.list_resources(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ListResourcesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_deployment_statefile_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.export_deployment_statefile), "__call__" + ) as call: + client.export_deployment_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportDeploymentStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_revision_statefile_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.export_revision_statefile), "__call__" + ) as call: + client.export_revision_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportRevisionStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_import_statefile_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: + client.import_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ImportStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_statefile_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: + client.delete_statefile(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.DeleteStatefileRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_lock_deployment_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: + client.lock_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.LockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_unlock_deployment_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.unlock_deployment), "__call__" + ) as call: + client.unlock_deployment(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.UnlockDeploymentRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_export_lock_info_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: + client.export_lock_info(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.ExportLockInfoRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_preview_empty_call_rest(): + client = ConfigClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object(type(client.transport.create_preview), "__call__") as call: + client.create_preview(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = config.CreatePreviewRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_resources_empty_call_rest(): +def test_get_preview_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_resources), "__call__") as call: - client.list_resources(request=None) + with mock.patch.object(type(client.transport.get_preview), "__call__") as call: + client.get_preview(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ListResourcesRequest() + request_msg = config.GetPreviewRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_export_deployment_statefile_empty_call_rest(): +def test_list_previews_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object( - type(client.transport.export_deployment_statefile), "__call__" - ) as call: - client.export_deployment_statefile(request=None) + with mock.patch.object(type(client.transport.list_previews), "__call__") as call: + client.list_previews(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ExportDeploymentStatefileRequest() + request_msg = config.ListPreviewsRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_export_revision_statefile_empty_call_rest(): +def test_delete_preview_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object( - type(client.transport.export_revision_statefile), "__call__" - ) as call: - client.export_revision_statefile(request=None) + with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: + client.delete_preview(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ExportRevisionStatefileRequest() + request_msg = config.DeletePreviewRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_import_statefile_empty_call_rest(): +def test_export_preview_result_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.import_statefile), "__call__") as call: - client.import_statefile(request=None) + with mock.patch.object( + type(client.transport.export_preview_result), "__call__" + ) as call: + client.export_preview_result(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ImportStatefileRequest() + request_msg = config.ExportPreviewResultRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_statefile_empty_call_rest(): +def test_list_terraform_versions_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.delete_statefile), "__call__") as call: - client.delete_statefile(request=None) + with mock.patch.object( + type(client.transport.list_terraform_versions), "__call__" + ) as call: + client.list_terraform_versions(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.DeleteStatefileRequest() + request_msg = config.ListTerraformVersionsRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_lock_deployment_empty_call_rest(): +def test_get_terraform_version_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.lock_deployment), "__call__") as call: - client.lock_deployment(request=None) + with mock.patch.object( + type(client.transport.get_terraform_version), "__call__" + ) as call: + client.get_terraform_version(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.LockDeploymentRequest() + request_msg = config.GetTerraformVersionRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_unlock_deployment_empty_call_rest(): +def test_list_resource_changes_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -24902,121 +32425,131 @@ def test_unlock_deployment_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.unlock_deployment), "__call__" + type(client.transport.list_resource_changes), "__call__" ) as call: - client.unlock_deployment(request=None) + client.list_resource_changes(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.UnlockDeploymentRequest() + request_msg = config.ListResourceChangesRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_export_lock_info_empty_call_rest(): +def test_get_resource_change_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.export_lock_info), "__call__") as call: - client.export_lock_info(request=None) + with mock.patch.object( + type(client.transport.get_resource_change), "__call__" + ) as call: + client.get_resource_change(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ExportLockInfoRequest() + request_msg = config.GetResourceChangeRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_create_preview_empty_call_rest(): +def test_list_resource_drifts_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.create_preview), "__call__") as call: - client.create_preview(request=None) + with mock.patch.object( + type(client.transport.list_resource_drifts), "__call__" + ) as call: + client.list_resource_drifts(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.CreatePreviewRequest() + request_msg = config.ListResourceDriftsRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_preview_empty_call_rest(): +def test_get_resource_drift_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.get_preview), "__call__") as call: - client.get_preview(request=None) + with mock.patch.object( + type(client.transport.get_resource_drift), "__call__" + ) as call: + client.get_resource_drift(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetPreviewRequest() + request_msg = config.GetResourceDriftRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_previews_empty_call_rest(): +def test_get_auto_migration_config_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. - with mock.patch.object(type(client.transport.list_previews), "__call__") as call: - client.list_previews(request=None) + with mock.patch.object( + type(client.transport.get_auto_migration_config), "__call__" + ) as call: + client.get_auto_migration_config(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ListPreviewsRequest() + request_msg = config.GetAutoMigrationConfigRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_delete_preview_empty_call_rest(): +def test_update_auto_migration_config_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", ) # Mock the actual call, and fake the request. 
- with mock.patch.object(type(client.transport.delete_preview), "__call__") as call: - client.delete_preview(request=None) + with mock.patch.object( + type(client.transport.update_auto_migration_config), "__call__" + ) as call: + client.update_auto_migration_config(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.DeletePreviewRequest() + request_msg = config.UpdateAutoMigrationConfigRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_export_preview_result_empty_call_rest(): +def test_get_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25024,21 +32557,21 @@ def test_export_preview_result_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.export_preview_result), "__call__" + type(client.transport.get_deployment_group), "__call__" ) as call: - client.export_preview_result(request=None) + client.get_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ExportPreviewResultRequest() + request_msg = config.GetDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_terraform_versions_empty_call_rest(): +def test_create_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25046,21 +32579,21 @@ def test_list_terraform_versions_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_terraform_versions), "__call__" + type(client.transport.create_deployment_group), "__call__" ) as call: - client.list_terraform_versions(request=None) + client.create_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ListTerraformVersionsRequest() + request_msg = config.CreateDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_terraform_version_empty_call_rest(): +def test_update_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25068,21 +32601,21 @@ def test_get_terraform_version_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_terraform_version), "__call__" + type(client.transport.update_deployment_group), "__call__" ) as call: - client.get_terraform_version(request=None) + client.update_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetTerraformVersionRequest() + request_msg = config.UpdateDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_resource_changes_empty_call_rest(): +def test_delete_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25090,21 +32623,21 @@ def test_list_resource_changes_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_resource_changes), "__call__" + type(client.transport.delete_deployment_group), "__call__" ) as call: - client.list_resource_changes(request=None) + client.delete_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceChangesRequest() + request_msg = config.DeleteDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_resource_change_empty_call_rest(): +def test_list_deployment_groups_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25112,21 +32645,21 @@ def test_get_resource_change_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_resource_change), "__call__" + type(client.transport.list_deployment_groups), "__call__" ) as call: - client.get_resource_change(request=None) + client.list_deployment_groups(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceChangeRequest() + request_msg = config.ListDeploymentGroupsRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_list_resource_drifts_empty_call_rest(): +def test_provision_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25134,21 +32667,21 @@ def test_list_resource_drifts_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.list_resource_drifts), "__call__" + type(client.transport.provision_deployment_group), "__call__" ) as call: - client.list_resource_drifts(request=None) + client.provision_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.ListResourceDriftsRequest() + request_msg = config.ProvisionDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_resource_drift_empty_call_rest(): +def test_deprovision_deployment_group_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25156,21 +32689,21 @@ def test_get_resource_drift_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.get_resource_drift), "__call__" + type(client.transport.deprovision_deployment_group), "__call__" ) as call: - client.get_resource_drift(request=None) + client.deprovision_deployment_group(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetResourceDriftRequest() + request_msg = config.DeprovisionDeploymentGroupRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_get_auto_migration_config_empty_call_rest(): +def test_get_deployment_group_revision_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25178,21 +32711,21 @@ def test_get_auto_migration_config_empty_call_rest(): # Mock the actual call, and fake the request. 
with mock.patch.object( - type(client.transport.get_auto_migration_config), "__call__" + type(client.transport.get_deployment_group_revision), "__call__" ) as call: - client.get_auto_migration_config(request=None) + client.get_deployment_group_revision(request=None) # Establish that the underlying stub method was called. call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.GetAutoMigrationConfigRequest() + request_msg = config.GetDeploymentGroupRevisionRequest() assert args[0] == request_msg # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. -def test_update_auto_migration_config_empty_call_rest(): +def test_list_deployment_group_revisions_empty_call_rest(): client = ConfigClient( credentials=ga_credentials.AnonymousCredentials(), transport="rest", @@ -25200,14 +32733,14 @@ def test_update_auto_migration_config_empty_call_rest(): # Mock the actual call, and fake the request. with mock.patch.object( - type(client.transport.update_auto_migration_config), "__call__" + type(client.transport.list_deployment_group_revisions), "__call__" ) as call: - client.update_auto_migration_config(request=None) + client.list_deployment_group_revisions(request=None) # Establish that the underlying stub method was called. 
call.assert_called() _, args, _ = call.mock_calls[0] - request_msg = config.UpdateAutoMigrationConfigRequest() + request_msg = config.ListDeploymentGroupRevisionsRequest() assert args[0] == request_msg @@ -25291,6 +32824,15 @@ def test_config_base_transport(): "get_resource_drift", "get_auto_migration_config", "update_auto_migration_config", + "get_deployment_group", + "create_deployment_group", + "update_deployment_group", + "delete_deployment_group", + "list_deployment_groups", + "provision_deployment_group", + "deprovision_deployment_group", + "get_deployment_group_revision", + "list_deployment_group_revisions", "set_iam_policy", "get_iam_policy", "test_iam_permissions", @@ -25654,6 +33196,33 @@ def test_config_client_transport_session_collision(transport_name): session1 = client1.transport.update_auto_migration_config._session session2 = client2.transport.update_auto_migration_config._session assert session1 != session2 + session1 = client1.transport.get_deployment_group._session + session2 = client2.transport.get_deployment_group._session + assert session1 != session2 + session1 = client1.transport.create_deployment_group._session + session2 = client2.transport.create_deployment_group._session + assert session1 != session2 + session1 = client1.transport.update_deployment_group._session + session2 = client2.transport.update_deployment_group._session + assert session1 != session2 + session1 = client1.transport.delete_deployment_group._session + session2 = client2.transport.delete_deployment_group._session + assert session1 != session2 + session1 = client1.transport.list_deployment_groups._session + session2 = client2.transport.list_deployment_groups._session + assert session1 != session2 + session1 = client1.transport.provision_deployment_group._session + session2 = client2.transport.provision_deployment_group._session + assert session1 != session2 + session1 = client1.transport.deprovision_deployment_group._session + session2 = 
client2.transport.deprovision_deployment_group._session + assert session1 != session2 + session1 = client1.transport.get_deployment_group_revision._session + session2 = client2.transport.get_deployment_group_revision._session + assert session1 != session2 + session1 = client1.transport.list_deployment_group_revisions._session + session2 = client2.transport.list_deployment_group_revisions._session + assert session1 != session2 def test_config_grpc_transport_channel(): @@ -25860,10 +33429,67 @@ def test_parse_deployment_path(): assert expected == actual -def test_preview_path(): +def test_deployment_group_path(): project = "scallop" location = "abalone" - preview = "squid" + deployment_group = "squid" + expected = "projects/{project}/locations/{location}/deploymentGroups/{deployment_group}".format( + project=project, + location=location, + deployment_group=deployment_group, + ) + actual = ConfigClient.deployment_group_path(project, location, deployment_group) + assert expected == actual + + +def test_parse_deployment_group_path(): + expected = { + "project": "clam", + "location": "whelk", + "deployment_group": "octopus", + } + path = ConfigClient.deployment_group_path(**expected) + + # Check that the path construction is reversible. 
+ actual = ConfigClient.parse_deployment_group_path(path) + assert expected == actual + + +def test_deployment_group_revision_path(): + project = "oyster" + location = "nudibranch" + deployment_group = "cuttlefish" + revision = "mussel" + expected = "projects/{project}/locations/{location}/deploymentGroups/{deployment_group}/revisions/{revision}".format( + project=project, + location=location, + deployment_group=deployment_group, + revision=revision, + ) + actual = ConfigClient.deployment_group_revision_path( + project, location, deployment_group, revision + ) + assert expected == actual + + +def test_parse_deployment_group_revision_path(): + expected = { + "project": "winkle", + "location": "nautilus", + "deployment_group": "scallop", + "revision": "abalone", + } + path = ConfigClient.deployment_group_revision_path(**expected) + + # Check that the path construction is reversible. + actual = ConfigClient.parse_deployment_group_revision_path(path) + assert expected == actual + + +def test_preview_path(): + project = "squid" + location = "clam" + preview = "whelk" expected = "projects/{project}/locations/{location}/previews/{preview}".format( project=project, location=location, @@ -25875,9 +33501,9 @@ def test_preview_path(): def test_parse_preview_path(): expected = { - "project": "clam", - "location": "whelk", - "preview": "octopus", + "project": "octopus", + "location": "oyster", + "preview": "nudibranch", } path = ConfigClient.preview_path(**expected) @@ -25887,11 +33513,11 @@ def test_parse_preview_path(): def test_resource_path(): - project = "oyster" - location = "nudibranch" - deployment = "cuttlefish" - revision = "mussel" - resource = "winkle" + project = "cuttlefish" + location = "mussel" + deployment = "winkle" + revision = "nautilus" + resource = "scallop" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}/resources/{resource}".format( project=project, location=location, @@ -25907,11 +33533,11 @@ def 
test_resource_path(): def test_parse_resource_path(): expected = { - "project": "nautilus", - "location": "scallop", - "deployment": "abalone", - "revision": "squid", - "resource": "clam", + "project": "abalone", + "location": "squid", + "deployment": "clam", + "revision": "whelk", + "resource": "octopus", } path = ConfigClient.resource_path(**expected) @@ -25921,10 +33547,10 @@ def test_parse_resource_path(): def test_resource_change_path(): - project = "whelk" - location = "octopus" - preview = "oyster" - resource_change = "nudibranch" + project = "oyster" + location = "nudibranch" + preview = "cuttlefish" + resource_change = "mussel" expected = "projects/{project}/locations/{location}/previews/{preview}/resourceChanges/{resource_change}".format( project=project, location=location, @@ -25939,10 +33565,10 @@ def test_resource_change_path(): def test_parse_resource_change_path(): expected = { - "project": "cuttlefish", - "location": "mussel", - "preview": "winkle", - "resource_change": "nautilus", + "project": "winkle", + "location": "nautilus", + "preview": "scallop", + "resource_change": "abalone", } path = ConfigClient.resource_change_path(**expected) @@ -25952,10 +33578,10 @@ def test_parse_resource_change_path(): def test_resource_drift_path(): - project = "scallop" - location = "abalone" - preview = "squid" - resource_drift = "clam" + project = "squid" + location = "clam" + preview = "whelk" + resource_drift = "octopus" expected = "projects/{project}/locations/{location}/previews/{preview}/resourceDrifts/{resource_drift}".format( project=project, location=location, @@ -25970,10 +33596,10 @@ def test_resource_drift_path(): def test_parse_resource_drift_path(): expected = { - "project": "whelk", - "location": "octopus", - "preview": "oyster", - "resource_drift": "nudibranch", + "project": "oyster", + "location": "nudibranch", + "preview": "cuttlefish", + "resource_drift": "mussel", } path = ConfigClient.resource_drift_path(**expected) @@ -25983,10 +33609,10 @@ 
def test_parse_resource_drift_path(): def test_revision_path(): - project = "cuttlefish" - location = "mussel" - deployment = "winkle" - revision = "nautilus" + project = "winkle" + location = "nautilus" + deployment = "scallop" + revision = "abalone" expected = "projects/{project}/locations/{location}/deployments/{deployment}/revisions/{revision}".format( project=project, location=location, @@ -25999,10 +33625,10 @@ def test_revision_path(): def test_parse_revision_path(): expected = { - "project": "scallop", - "location": "abalone", - "deployment": "squid", - "revision": "clam", + "project": "squid", + "location": "clam", + "deployment": "whelk", + "revision": "octopus", } path = ConfigClient.revision_path(**expected) @@ -26012,8 +33638,8 @@ def test_parse_revision_path(): def test_service_account_path(): - project = "whelk" - service_account = "octopus" + project = "oyster" + service_account = "nudibranch" expected = "projects/{project}/serviceAccounts/{service_account}".format( project=project, service_account=service_account, @@ -26024,8 +33650,8 @@ def test_service_account_path(): def test_parse_service_account_path(): expected = { - "project": "oyster", - "service_account": "nudibranch", + "project": "cuttlefish", + "service_account": "mussel", } path = ConfigClient.service_account_path(**expected) @@ -26035,9 +33661,9 @@ def test_parse_service_account_path(): def test_terraform_version_path(): - project = "cuttlefish" - location = "mussel" - terraform_version = "winkle" + project = "winkle" + location = "nautilus" + terraform_version = "scallop" expected = "projects/{project}/locations/{location}/terraformVersions/{terraform_version}".format( project=project, location=location, @@ -26049,9 +33675,9 @@ def test_terraform_version_path(): def test_parse_terraform_version_path(): expected = { - "project": "nautilus", - "location": "scallop", - "terraform_version": "abalone", + "project": "abalone", + "location": "squid", + "terraform_version": "clam", } path = 
ConfigClient.terraform_version_path(**expected) @@ -26061,9 +33687,9 @@ def test_parse_terraform_version_path(): def test_worker_pool_path(): - project = "squid" - location = "clam" - worker_pool = "whelk" + project = "whelk" + location = "octopus" + worker_pool = "oyster" expected = ( "projects/{project}/locations/{location}/workerPools/{worker_pool}".format( project=project, @@ -26077,9 +33703,9 @@ def test_worker_pool_path(): def test_parse_worker_pool_path(): expected = { - "project": "octopus", - "location": "oyster", - "worker_pool": "nudibranch", + "project": "nudibranch", + "location": "cuttlefish", + "worker_pool": "mussel", } path = ConfigClient.worker_pool_path(**expected) @@ -26089,7 +33715,7 @@ def test_parse_worker_pool_path(): def test_common_billing_account_path(): - billing_account = "cuttlefish" + billing_account = "winkle" expected = "billingAccounts/{billing_account}".format( billing_account=billing_account, ) @@ -26099,7 +33725,7 @@ def test_common_billing_account_path(): def test_parse_common_billing_account_path(): expected = { - "billing_account": "mussel", + "billing_account": "nautilus", } path = ConfigClient.common_billing_account_path(**expected) @@ -26109,7 +33735,7 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): - folder = "winkle" + folder = "scallop" expected = "folders/{folder}".format( folder=folder, ) @@ -26119,7 +33745,7 @@ def test_common_folder_path(): def test_parse_common_folder_path(): expected = { - "folder": "nautilus", + "folder": "abalone", } path = ConfigClient.common_folder_path(**expected) @@ -26129,7 +33755,7 @@ def test_parse_common_folder_path(): def test_common_organization_path(): - organization = "scallop" + organization = "squid" expected = "organizations/{organization}".format( organization=organization, ) @@ -26139,7 +33765,7 @@ def test_common_organization_path(): def test_parse_common_organization_path(): expected = { - "organization": "abalone", + "organization": "clam", } 
path = ConfigClient.common_organization_path(**expected) @@ -26149,7 +33775,7 @@ def test_parse_common_organization_path(): def test_common_project_path(): - project = "squid" + project = "whelk" expected = "projects/{project}".format( project=project, ) @@ -26159,7 +33785,7 @@ def test_common_project_path(): def test_parse_common_project_path(): expected = { - "project": "clam", + "project": "octopus", } path = ConfigClient.common_project_path(**expected) @@ -26169,8 +33795,8 @@ def test_parse_common_project_path(): def test_common_location_path(): - project = "whelk" - location = "octopus" + project = "oyster" + location = "nudibranch" expected = "projects/{project}/locations/{location}".format( project=project, location=location, @@ -26181,8 +33807,8 @@ def test_common_location_path(): def test_parse_common_location_path(): expected = { - "project": "oyster", - "location": "nudibranch", + "project": "cuttlefish", + "location": "mussel", } path = ConfigClient.common_location_path(**expected) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py index 777803fefd94..71101fb89b0c 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter/__init__.py @@ -24,12 +24,16 @@ from google.cloud.databasecenter_v1beta.services.database_center.client import ( DatabaseCenterClient, ) +from google.cloud.databasecenter_v1beta.types.affiliation import Affiliation from google.cloud.databasecenter_v1beta.types.machine_config import MachineConfig from google.cloud.databasecenter_v1beta.types.maintenance import ( MaintenanceInfo, + MaintenanceState, Phase, + PossibleFailureReason, ResourceMaintenanceDenySchedule, ResourceMaintenanceSchedule, + UpcomingMaintenance, ) from google.cloud.databasecenter_v1beta.types.metric_data import ( MetricData, @@ -102,11 +106,15 @@ 
__all__ = ( "DatabaseCenterClient", "DatabaseCenterAsyncClient", + "Affiliation", "MachineConfig", "MaintenanceInfo", "ResourceMaintenanceDenySchedule", "ResourceMaintenanceSchedule", + "UpcomingMaintenance", + "MaintenanceState", "Phase", + "PossibleFailureReason", "MetricData", "Metrics", "TypedValue", diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py index 5c82994384d0..96d9ee7cf2b2 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/__init__.py @@ -30,12 +30,16 @@ from .services.database_center import DatabaseCenterAsyncClient, DatabaseCenterClient +from .types.affiliation import Affiliation from .types.machine_config import MachineConfig from .types.maintenance import ( MaintenanceInfo, + MaintenanceState, Phase, + PossibleFailureReason, ResourceMaintenanceDenySchedule, ResourceMaintenanceSchedule, + UpcomingMaintenance, ) from .types.metric_data import MetricData, Metrics, TypedValue from .types.operation_error_type import OperationErrorType @@ -192,6 +196,7 @@ def _get_version(dependency_name): __all__ = ( "DatabaseCenterAsyncClient", "AdditionalDetail", + "Affiliation", "AggregateFleetRequest", "AggregateFleetResponse", "AggregateFleetRow", @@ -218,12 +223,14 @@ def _get_version(dependency_name): "MachineConfig", "MaintenanceInfo", "MaintenanceRecommendationInfo", + "MaintenanceState", "ManagementType", "MetricData", "Metrics", "OperationErrorType", "OutdatedMinorVersionInfo", "Phase", + "PossibleFailureReason", "Product", "ProductType", "QueryDatabaseResourceGroupsRequest", @@ -254,4 +261,5 @@ def _get_version(dependency_name): "SuspensionReason", "Tag", "TypedValue", + "UpcomingMaintenance", ) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py 
b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py index 366f0e7d679b..c469d89a86ad 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/__init__.py @@ -13,14 +13,20 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .affiliation import ( + Affiliation, +) from .machine_config import ( MachineConfig, ) from .maintenance import ( MaintenanceInfo, + MaintenanceState, Phase, + PossibleFailureReason, ResourceMaintenanceDenySchedule, ResourceMaintenanceSchedule, + UpcomingMaintenance, ) from .metric_data import ( MetricData, @@ -93,11 +99,15 @@ ) __all__ = ( + "Affiliation", "MachineConfig", "MaintenanceInfo", "ResourceMaintenanceDenySchedule", "ResourceMaintenanceSchedule", + "UpcomingMaintenance", + "MaintenanceState", "Phase", + "PossibleFailureReason", "MetricData", "Metrics", "TypedValue", diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/affiliation.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/affiliation.py new file mode 100644 index 000000000000..77b48b9e66b3 --- /dev/null +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/affiliation.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.cloud.databasecenter.v1beta", + manifest={ + "Affiliation", + }, +) + + +class Affiliation(proto.Message): + r"""Affiliation information of a resource + + Attributes: + resource_id (str): + Optional. resource id of affiliated resource + full_resource_name (str): + Optional. Full resource name + lineages (MutableSequence[google.cloud.databasecenter_v1beta.types.Affiliation.Lineage]): + Optional. Multiple lineages can be created + from a resource. For example, a resource can be + replicated to multiple target resources. In this + case, there will be multiple lineages for the + resource, one for each target resource. + """ + + class ProcessType(proto.Enum): + r"""Type of process which created the lineage. + + Values: + PROCESS_TYPE_UNSPECIFIED (0): + Unspecified process type. + COMPOSER (1): + Composer process type. + DATASTREAM (2): + Datastream process type. + DATAFLOW (3): + Dataflow process type. + BIGQUERY (4): + Bigquery process type. + DATA_FUSION (5): + Data fusion process type. + DATAPROC (6): + Dataproc process type. + """ + + PROCESS_TYPE_UNSPECIFIED = 0 + COMPOSER = 1 + DATASTREAM = 2 + DATAFLOW = 3 + BIGQUERY = 4 + DATA_FUSION = 5 + DATAPROC = 6 + + class Lineage(proto.Message): + r"""lineage information of the affiliated resources + This captures source, target and process which created the + lineage. + + Attributes: + source_fqn (str): + Optional. FQN of source table / column + target_fqn (str): + Optional. FQN of target table / column + process_fqn (str): + Optional. FQN of process which created the + lineage i.e. dataplex, datastream etc. + process_type (google.cloud.databasecenter_v1beta.types.Affiliation.ProcessType): + Optional. Type of process which created the + lineage. 
+ """ + + source_fqn: str = proto.Field( + proto.STRING, + number=1, + ) + target_fqn: str = proto.Field( + proto.STRING, + number=2, + ) + process_fqn: str = proto.Field( + proto.STRING, + number=3, + ) + process_type: "Affiliation.ProcessType" = proto.Field( + proto.ENUM, + number=4, + enum="Affiliation.ProcessType", + ) + + resource_id: str = proto.Field( + proto.STRING, + number=1, + ) + full_resource_name: str = proto.Field( + proto.STRING, + number=2, + ) + lineages: MutableSequence[Lineage] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=Lineage, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/machine_config.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/machine_config.py index 95d79852ce8c..839b52cad222 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/machine_config.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/machine_config.py @@ -47,6 +47,17 @@ class MachineConfig(proto.Message): applicable). This field is a member of `oneof`_ ``_vcpu_count``. + baseline_slot_count (int): + Optional. Baseline slots for BigQuery + Reservations. Baseline slots are in increments + of 50. + + This field is a member of `oneof`_ ``_baseline_slot_count``. + max_reservation_slot_count (int): + Optional. Max slots for BigQuery + Reservations. Max slots are in increments of 50. + + This field is a member of `oneof`_ ``_max_reservation_slot_count``. 
""" memory_size_bytes: int = proto.Field( @@ -63,6 +74,16 @@ class MachineConfig(proto.Message): number=4, optional=True, ) + baseline_slot_count: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + max_reservation_slot_count: int = proto.Field( + proto.INT64, + number=6, + optional=True, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/maintenance.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/maintenance.py index 55df658ebef4..e223ed79c201 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/maintenance.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/maintenance.py @@ -17,6 +17,7 @@ from typing import MutableMapping, MutableSequence +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore import google.type.date_pb2 as date_pb2 # type: ignore import google.type.dayofweek_pb2 as dayofweek_pb2 # type: ignore import google.type.timeofday_pb2 as timeofday_pb2 # type: ignore @@ -26,8 +27,11 @@ package="google.cloud.databasecenter.v1beta", manifest={ "Phase", + "MaintenanceState", + "PossibleFailureReason", "ResourceMaintenanceSchedule", "ResourceMaintenanceDenySchedule", + "UpcomingMaintenance", "MaintenanceInfo", }, ) @@ -60,6 +64,50 @@ class Phase(proto.Enum): PHASE_ANY = 4 +class MaintenanceState(proto.Enum): + r"""Resource maintenance state. + + Values: + MAINTENANCE_STATE_UNSPECIFIED (0): + Status is unspecified. + MAINTENANCE_STATE_SCHEDULED (1): + Maintenance is scheduled. + MAINTENANCE_STATE_IN_PROGRESS (2): + Maintenance is in progress. + MAINTENANCE_STATE_COMPLETED (3): + Maintenance is completed. + MAINTENANCE_STATE_FAILED (4): + Maintenance has failed. 
+ """ + + MAINTENANCE_STATE_UNSPECIFIED = 0 + MAINTENANCE_STATE_SCHEDULED = 1 + MAINTENANCE_STATE_IN_PROGRESS = 2 + MAINTENANCE_STATE_COMPLETED = 3 + MAINTENANCE_STATE_FAILED = 4 + + +class PossibleFailureReason(proto.Enum): + r"""Possible reasons why the maintenance is not completed. STATE_FAILED + maintenance state may not always have a failure reason. + + Values: + POSSIBLE_FAILURE_REASON_UNSPECIFIED (0): + Failure reason is unspecified. + POSSIBLE_FAILURE_REASON_DENY_POLICY_CONFLICT (1): + Maintenance may not be completed because + there is a deny policy overlapping with upcoming + maintenance schedule. + POSSIBLE_FAILURE_REASON_INSTANCE_IN_STOPPED_STATE (2): + Maintenance may not be completed because the + instance is stopped. + """ + + POSSIBLE_FAILURE_REASON_UNSPECIFIED = 0 + POSSIBLE_FAILURE_REASON_DENY_POLICY_CONFLICT = 1 + POSSIBLE_FAILURE_REASON_INSTANCE_IN_STOPPED_STATE = 2 + + class ResourceMaintenanceSchedule(proto.Message): r"""Maintenance window for the database resource. It specifies preferred time and day of the week and phase in some cases, when @@ -133,6 +181,32 @@ class ResourceMaintenanceDenySchedule(proto.Message): ) +class UpcomingMaintenance(proto.Message): + r"""Upcoming maintenance window for the database resource. + + Attributes: + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. Start time of the upcoming + maintenance. Start time is always populated when + an upcoming maintenance is scheduled. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. End time of the upcoming + maintenance. This is only populated for an + engine, if end time is public for the engine. 
+ """ + + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=1, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=2, + message=timestamp_pb2.Timestamp, + ) + + class MaintenanceInfo(proto.Message): r"""MaintenanceInfo to capture the maintenance details of database resource. @@ -147,6 +221,34 @@ class MaintenanceInfo(proto.Message): maintenance_version (str): Output only. Current Maintenance version of the database resource. Example: "MYSQL_8_0_41.R20250531.01_15". + current_version_release_date (google.type.date_pb2.Date): + Output only. The date when the maintenance + version was released. + upcoming_maintenance (google.cloud.databasecenter_v1beta.types.UpcomingMaintenance): + Output only. Upcoming maintenance window for the database + resource. This is only populated for an engine, if upcoming + maintenance is scheduled for the resource. This schedule is + generated per engine and engine version, and there is only + one upcoming maintenance window at any given time. In case + of upcoming maintenance, the maintenance_state will be set + to SCHEDULED first, and then IN_PROGRESS when the + maintenance window starts. + state (google.cloud.databasecenter_v1beta.types.MaintenanceState): + Output only. Resource maintenance state. This + is to capture the current state of the + maintenance. + possible_failure_reasons (MutableSequence[google.cloud.databasecenter_v1beta.types.PossibleFailureReason]): + Output only. List of possible reasons why the + maintenance is not completed. This is an + optional field and is only populated if there + are any reasons for failures recorded for the + maintenance by DB Center. FAILURE maintenance + status may not always have a failure reason. + previous_maintenance_version (str): + Output only. Previous maintenance version of the database + resource. Example: "MYSQL_8_0_41.R20250531.01_15". 
This is + available once a minor version maintenance is complete on a + database resource. """ maintenance_schedule: "ResourceMaintenanceSchedule" = proto.Field( @@ -165,6 +267,32 @@ class MaintenanceInfo(proto.Message): proto.STRING, number=3, ) + current_version_release_date: date_pb2.Date = proto.Field( + proto.MESSAGE, + number=4, + message=date_pb2.Date, + ) + upcoming_maintenance: "UpcomingMaintenance" = proto.Field( + proto.MESSAGE, + number=5, + message="UpcomingMaintenance", + ) + state: "MaintenanceState" = proto.Field( + proto.ENUM, + number=6, + enum="MaintenanceState", + ) + possible_failure_reasons: MutableSequence["PossibleFailureReason"] = ( + proto.RepeatedField( + proto.ENUM, + number=7, + enum="PossibleFailureReason", + ) + ) + previous_maintenance_version: str = proto.Field( + proto.STRING, + number=8, + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py index 97f495751161..b583e538ed69 100644 --- a/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py +++ b/packages/google-cloud-databasecenter/google/cloud/databasecenter_v1beta/types/service.py @@ -20,19 +20,24 @@ import google.type.date_pb2 as date_pb2 # type: ignore import proto # type: ignore +from google.cloud.databasecenter_v1beta.types import ( + affiliation, + maintenance, + metric_data, + signals, +) from google.cloud.databasecenter_v1beta.types import ( machine_config as gcd_machine_config, ) -from google.cloud.databasecenter_v1beta.types import maintenance, metric_data, signals from google.cloud.databasecenter_v1beta.types import product as gcd_product __protobuf__ = proto.module( package="google.cloud.databasecenter.v1beta", manifest={ - "ResourceCategory", "Edition", "SubResourceType", "ManagementType", + "ResourceCategory", "QueryProductsRequest", 
"QueryProductsResponse", "QueryDatabaseResourceGroupsRequest", @@ -60,27 +65,6 @@ ) -class ResourceCategory(proto.Enum): - r"""The enum value corresponds to 'type' suffix in the resource_type - field. - - Values: - RESOURCE_CATEGORY_UNSPECIFIED (0): - Unspecified. - INSTANCE (1): - A resource that is an Instance. - CLUSTER (2): - A resource that is a Cluster. - DATABASE (3): - A resource that is a Database. - """ - - RESOURCE_CATEGORY_UNSPECIFIED = 0 - INSTANCE = 1 - CLUSTER = 2 - DATABASE = 3 - - class Edition(proto.Enum): r"""Represents the edition of a database resource. @@ -154,6 +138,33 @@ class ManagementType(proto.Enum): MANAGEMENT_TYPE_SELF_MANAGED = 2 +class ResourceCategory(proto.Enum): + r"""The enum value corresponds to 'type' suffix in the resource_type + field. + + Values: + RESOURCE_CATEGORY_UNSPECIFIED (0): + Unspecified. + INSTANCE (1): + A resource that is an Instance. + CLUSTER (2): + A resource that is a Cluster. + DATABASE (3): + A resource that is a Database. + DATASET (4): + A resource that is a Dataset. + RESERVATION (5): + A resource that is a Reservation. + """ + + RESOURCE_CATEGORY_UNSPECIFIED = 0 + INSTANCE = 1 + CLUSTER = 2 + DATABASE = 3 + DATASET = 4 + RESERVATION = 5 + + class QueryProductsRequest(proto.Message): r"""QueryProductsRequest is the request to get a list of products. @@ -533,6 +544,9 @@ class DatabaseResource(proto.Message): maintenance_info (google.cloud.databasecenter_v1beta.types.MaintenanceInfo): Optional. The maintenance information of the resource. + affiliations (MutableSequence[google.cloud.databasecenter_v1beta.types.Affiliation]): + Optional. Affiliation details of the + resource. 
""" child_resources: MutableSequence["DatabaseResource"] = proto.RepeatedField( @@ -615,6 +629,11 @@ class DatabaseResource(proto.Message): number=19, message=maintenance.MaintenanceInfo, ) + affiliations: MutableSequence[affiliation.Affiliation] = proto.RepeatedField( + proto.MESSAGE, + number=20, + message=affiliation.Affiliation, + ) class AggregateIssueStatsRequest(proto.Message): diff --git a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_documentation.py b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_documentation.py index d89cd19d1d5a..c8f561949215 100644 --- a/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_documentation.py +++ b/packages/google-cloud-dataplex/google/cloud/dataplex_v1/types/data_documentation.py @@ -82,17 +82,58 @@ class GenerationScope(proto.Enum): class DataDocumentationResult(proto.Message): r"""The output of a DataDocumentation scan. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: + dataset_result (google.cloud.dataplex_v1.types.DataDocumentationResult.DatasetResult): + Output only. Insights for a Dataset resource. + + This field is a member of `oneof`_ ``result``. table_result (google.cloud.dataplex_v1.types.DataDocumentationResult.TableResult): - Output only. Table result for insights. + Output only. Insights for a Table resource. This field is a member of `oneof`_ ``result``. """ + class DatasetResult(proto.Message): + r"""Insights for a dataset resource. + + Attributes: + overview (str): + Output only. Generated Dataset description. + schema_relationships (MutableSequence[google.cloud.dataplex_v1.types.DataDocumentationResult.SchemaRelationship]): + Output only. 
Relationships suggesting how + tables in the dataset are related to each other, + based on their schema. + queries (MutableSequence[google.cloud.dataplex_v1.types.DataDocumentationResult.Query]): + Output only. Sample SQL queries for the + dataset. + """ + + overview: str = proto.Field( + proto.STRING, + number=1, + ) + schema_relationships: MutableSequence[ + "DataDocumentationResult.SchemaRelationship" + ] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message="DataDocumentationResult.SchemaRelationship", + ) + queries: MutableSequence["DataDocumentationResult.Query"] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="DataDocumentationResult.Query", + ) + class TableResult(proto.Message): - r"""Generated metadata about the table. + r"""Insights for a table resource. Attributes: name (str): @@ -129,6 +170,121 @@ class TableResult(proto.Message): message="DataDocumentationResult.Query", ) + class SchemaRelationship(proto.Message): + r"""Details of the relationship between the schema of two + resources. + + Attributes: + left_schema_paths (google.cloud.dataplex_v1.types.DataDocumentationResult.SchemaRelationship.SchemaPaths): + Output only. An ordered list of fields for the join from the + first table. The size of this list must be the same as + ``right_schema_paths``. Each field at index i in this list + must correspond to a field at the same index in the + ``right_schema_paths`` list. + right_schema_paths (google.cloud.dataplex_v1.types.DataDocumentationResult.SchemaRelationship.SchemaPaths): + Output only. An ordered list of fields for the join from the + second table. The size of this list must be the same as + ``left_schema_paths``. Each field at index i in this list + must correspond to a field at the same index in the + ``left_schema_paths`` list. + sources (MutableSequence[google.cloud.dataplex_v1.types.DataDocumentationResult.SchemaRelationship.Source]): + Output only. Sources which generated the + schema relation edge. 
+ type_ (google.cloud.dataplex_v1.types.DataDocumentationResult.SchemaRelationship.Type): + Output only. The type of relationship between + the schema paths. + """ + + class Source(proto.Enum): + r"""Source which generated the schema relation edge. + + Values: + SOURCE_UNSPECIFIED (0): + The source of the schema relationship is + unspecified. + AGENT (4): + The source of the schema relationship is + agent. + QUERY_HISTORY (5): + The source of the schema relationship is + query history from the source system. + TABLE_CONSTRAINTS (6): + The source of the schema relationship is + table constraints added in the source system. + """ + + SOURCE_UNSPECIFIED = 0 + AGENT = 4 + QUERY_HISTORY = 5 + TABLE_CONSTRAINTS = 6 + + class Type(proto.Enum): + r"""The type of relationship. + + Values: + TYPE_UNSPECIFIED (0): + The type of the schema relationship is + unspecified. + SCHEMA_JOIN (1): + Indicates a join relationship between the + schema fields. + """ + + TYPE_UNSPECIFIED = 0 + SCHEMA_JOIN = 1 + + class SchemaPaths(proto.Message): + r"""Represents an ordered set of paths within a table's schema. + + Attributes: + table_fqn (str): + Output only. The service-qualified full resource name of the + table Ex: + //bigquery.googleapis.com/projects/PROJECT_ID/datasets/DATASET_ID/tables/TABLE_ID + paths (MutableSequence[str]): + Output only. An ordered set of Paths to fields within the + schema of the table. For fields nested within a top level + field of type record, use '.' to separate field names. 
+ Examples: Top level field - ``top_level`` Nested field - + ``top_level.child.sub_field`` + """ + + table_fqn: str = proto.Field( + proto.STRING, + number=1, + ) + paths: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + left_schema_paths: "DataDocumentationResult.SchemaRelationship.SchemaPaths" = ( + proto.Field( + proto.MESSAGE, + number=1, + message="DataDocumentationResult.SchemaRelationship.SchemaPaths", + ) + ) + right_schema_paths: "DataDocumentationResult.SchemaRelationship.SchemaPaths" = ( + proto.Field( + proto.MESSAGE, + number=2, + message="DataDocumentationResult.SchemaRelationship.SchemaPaths", + ) + ) + sources: MutableSequence[ + "DataDocumentationResult.SchemaRelationship.Source" + ] = proto.RepeatedField( + proto.ENUM, + number=4, + enum="DataDocumentationResult.SchemaRelationship.Source", + ) + type_: "DataDocumentationResult.SchemaRelationship.Type" = proto.Field( + proto.ENUM, + number=6, + enum="DataDocumentationResult.SchemaRelationship.Type", + ) + class Query(proto.Message): r"""A sample SQL query in data documentation. 
@@ -190,6 +346,12 @@ class Field(proto.Message): message="DataDocumentationResult.Field", ) + dataset_result: DatasetResult = proto.Field( + proto.MESSAGE, + number=7, + oneof="result", + message=DatasetResult, + ) table_result: TableResult = proto.Field( proto.MESSAGE, number=8, diff --git a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py index 08043feff272..b2e798b9c94e 100644 --- a/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py +++ b/packages/google-cloud-dataplex/tests/unit/gapic/dataplex_v1/test_data_scan_service.py @@ -7499,6 +7499,21 @@ def test_create_data_scan_rest_call_success(request_type): }, }, "data_documentation_result": { + "dataset_result": { + "overview": "overview_value", + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [4], + "type_": 1, + } + ], + "queries": [{"sql": "sql_value", "description": "description_value"}], + }, "table_result": { "name": "name_value", "overview": "overview_value", @@ -7511,8 +7526,8 @@ def test_create_data_scan_rest_call_success(request_type): } ] }, - "queries": [{"sql": "sql_value", "description": "description_value"}], - } + "queries": {}, + }, }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
@@ -7955,6 +7970,21 @@ def test_update_data_scan_rest_call_success(request_type): }, }, "data_documentation_result": { + "dataset_result": { + "overview": "overview_value", + "schema_relationships": [ + { + "left_schema_paths": { + "table_fqn": "table_fqn_value", + "paths": ["paths_value1", "paths_value2"], + }, + "right_schema_paths": {}, + "sources": [4], + "type_": 1, + } + ], + "queries": [{"sql": "sql_value", "description": "description_value"}], + }, "table_result": { "name": "name_value", "overview": "overview_value", @@ -7967,8 +7997,8 @@ def test_update_data_scan_rest_call_success(request_type): } ] }, - "queries": [{"sql": "sql_value", "description": "description_value"}], - } + "queries": {}, + }, }, } # The version of a generated dependency at test runtime may differ from the version used during generation. diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py index 72961420aeb8..b9b874ad2439 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1alpha/types/search_service.py @@ -1465,11 +1465,17 @@ class Condition(proto.Enum): Disables Search As You Type. ENABLED (2): Enables Search As You Type. + AUTO (3): + Automatic switching between + search-as-you-type and standard search modes, + ideal for single-API implementations (e.g., + debouncing). 
""" CONDITION_UNSPECIFIED = 0 DISABLED = 1 ENABLED = 2 + AUTO = 3 condition: "SearchRequest.SearchAsYouTypeSpec.Condition" = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py index 2d227389ca27..6a492e88819f 100644 --- a/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py +++ b/packages/google-cloud-discoveryengine/google/cloud/discoveryengine_v1beta/types/search_service.py @@ -1513,11 +1513,17 @@ class Condition(proto.Enum): Disables Search As You Type. ENABLED (2): Enables Search As You Type. + AUTO (3): + Automatic switching between + search-as-you-type and standard search modes, + ideal for single-API implementations (e.g., + debouncing). """ CONDITION_UNSPECIFIED = 0 DISABLED = 1 ENABLED = 2 + AUTO = 3 condition: "SearchRequest.SearchAsYouTypeSpec.Condition" = proto.Field( proto.ENUM, diff --git a/packages/google-cloud-iam/docs/iam_v3beta/access_policies.rst b/packages/google-cloud-iam/docs/iam_v3beta/access_policies.rst new file mode 100644 index 000000000000..397aa826896f --- /dev/null +++ b/packages/google-cloud-iam/docs/iam_v3beta/access_policies.rst @@ -0,0 +1,10 @@ +AccessPolicies +-------------------------------- + +.. automodule:: google.cloud.iam_v3beta.services.access_policies + :members: + :inherited-members: + +.. automodule:: google.cloud.iam_v3beta.services.access_policies.pagers + :members: + :inherited-members: diff --git a/packages/google-cloud-iam/docs/iam_v3beta/services_.rst b/packages/google-cloud-iam/docs/iam_v3beta/services_.rst index 8502bf6a63c0..3320e83c92e9 100644 --- a/packages/google-cloud-iam/docs/iam_v3beta/services_.rst +++ b/packages/google-cloud-iam/docs/iam_v3beta/services_.rst @@ -3,5 +3,6 @@ Services for Google Cloud Iam v3beta API .. 
toctree:: :maxdepth: 2 + access_policies policy_bindings principal_access_boundary_policies diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/__init__.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/__init__.py index 20cbd036ab53..bf695137c8d3 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/__init__.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/__init__.py @@ -29,11 +29,27 @@ import importlib_metadata as metadata +from .services.access_policies import AccessPoliciesAsyncClient, AccessPoliciesClient from .services.policy_bindings import PolicyBindingsAsyncClient, PolicyBindingsClient from .services.principal_access_boundary_policies import ( PrincipalAccessBoundaryPoliciesAsyncClient, PrincipalAccessBoundaryPoliciesClient, ) +from .types.access_policies_service import ( + CreateAccessPolicyRequest, + DeleteAccessPolicyRequest, + GetAccessPolicyRequest, + ListAccessPoliciesRequest, + ListAccessPoliciesResponse, + SearchAccessPolicyBindingsRequest, + SearchAccessPolicyBindingsResponse, + UpdateAccessPolicyRequest, +) +from .types.access_policy_resources import ( + AccessPolicy, + AccessPolicyDetails, + AccessPolicyRule, +) from .types.operation_metadata import OperationMetadata from .types.policy_binding_resources import PolicyBinding from .types.policy_bindings_service import ( @@ -157,14 +173,24 @@ def _get_version(dependency_name): ) __all__ = ( + "AccessPoliciesAsyncClient", "PolicyBindingsAsyncClient", "PrincipalAccessBoundaryPoliciesAsyncClient", + "AccessPoliciesClient", + "AccessPolicy", + "AccessPolicyDetails", + "AccessPolicyRule", + "CreateAccessPolicyRequest", "CreatePolicyBindingRequest", "CreatePrincipalAccessBoundaryPolicyRequest", + "DeleteAccessPolicyRequest", "DeletePolicyBindingRequest", "DeletePrincipalAccessBoundaryPolicyRequest", + "GetAccessPolicyRequest", "GetPolicyBindingRequest", "GetPrincipalAccessBoundaryPolicyRequest", + "ListAccessPoliciesRequest", + "ListAccessPoliciesResponse", 
"ListPolicyBindingsRequest", "ListPolicyBindingsResponse", "ListPrincipalAccessBoundaryPoliciesRequest", @@ -176,10 +202,13 @@ def _get_version(dependency_name): "PrincipalAccessBoundaryPolicy", "PrincipalAccessBoundaryPolicyDetails", "PrincipalAccessBoundaryPolicyRule", + "SearchAccessPolicyBindingsRequest", + "SearchAccessPolicyBindingsResponse", "SearchPrincipalAccessBoundaryPolicyBindingsRequest", "SearchPrincipalAccessBoundaryPolicyBindingsResponse", "SearchTargetPolicyBindingsRequest", "SearchTargetPolicyBindingsResponse", + "UpdateAccessPolicyRequest", "UpdatePolicyBindingRequest", "UpdatePrincipalAccessBoundaryPolicyRequest", ) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/gapic_metadata.json b/packages/google-cloud-iam/google/cloud/iam_v3beta/gapic_metadata.json index bcfb25e34324..8f9a1299b2ee 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/gapic_metadata.json +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/gapic_metadata.json @@ -5,6 +5,115 @@ "protoPackage": "google.iam.v3beta", "schema": "1.0", "services": { + "AccessPolicies": { + "clients": { + "grpc": { + "libraryClient": "AccessPoliciesClient", + "rpcs": { + "CreateAccessPolicy": { + "methods": [ + "create_access_policy" + ] + }, + "DeleteAccessPolicy": { + "methods": [ + "delete_access_policy" + ] + }, + "GetAccessPolicy": { + "methods": [ + "get_access_policy" + ] + }, + "ListAccessPolicies": { + "methods": [ + "list_access_policies" + ] + }, + "SearchAccessPolicyBindings": { + "methods": [ + "search_access_policy_bindings" + ] + }, + "UpdateAccessPolicy": { + "methods": [ + "update_access_policy" + ] + } + } + }, + "grpc-async": { + "libraryClient": "AccessPoliciesAsyncClient", + "rpcs": { + "CreateAccessPolicy": { + "methods": [ + "create_access_policy" + ] + }, + "DeleteAccessPolicy": { + "methods": [ + "delete_access_policy" + ] + }, + "GetAccessPolicy": { + "methods": [ + "get_access_policy" + ] + }, + "ListAccessPolicies": { + "methods": [ + 
"list_access_policies" + ] + }, + "SearchAccessPolicyBindings": { + "methods": [ + "search_access_policy_bindings" + ] + }, + "UpdateAccessPolicy": { + "methods": [ + "update_access_policy" + ] + } + } + }, + "rest": { + "libraryClient": "AccessPoliciesClient", + "rpcs": { + "CreateAccessPolicy": { + "methods": [ + "create_access_policy" + ] + }, + "DeleteAccessPolicy": { + "methods": [ + "delete_access_policy" + ] + }, + "GetAccessPolicy": { + "methods": [ + "get_access_policy" + ] + }, + "ListAccessPolicies": { + "methods": [ + "list_access_policies" + ] + }, + "SearchAccessPolicyBindings": { + "methods": [ + "search_access_policy_bindings" + ] + }, + "UpdateAccessPolicy": { + "methods": [ + "update_access_policy" + ] + } + } + } + } + }, "PolicyBindings": { "clients": { "grpc": { diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/__init__.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/__init__.py new file mode 100644 index 000000000000..beb21687dc98 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .async_client import AccessPoliciesAsyncClient +from .client import AccessPoliciesClient + +__all__ = ( + "AccessPoliciesClient", + "AccessPoliciesAsyncClient", +) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/async_client.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/async_client.py new file mode 100644 index 000000000000..db4b42455b60 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/async_client.py @@ -0,0 +1,1179 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import logging as std_logging +import re +from collections import OrderedDict +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, +) + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry_async as retries +from google.api_core.client_options import ClientOptions +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.iam_v3beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.AsyncRetry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.AsyncRetry, object, None] # type: ignore + +import google.api_core.operation as operation # type: ignore +import google.api_core.operation_async as operation_async # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.iam_v3beta.services.access_policies import pagers +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, + operation_metadata, + policy_binding_resources, +) + +from .client import AccessPoliciesClient +from .transports.base import DEFAULT_CLIENT_INFO, AccessPoliciesTransport +from .transports.grpc_asyncio import AccessPoliciesGrpcAsyncIOTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class AccessPoliciesAsyncClient: + """Manages Identity and Access Management 
(IAM) access policies.""" + + _client: AccessPoliciesClient + + # Copy defaults from the synchronous client for use here. + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = AccessPoliciesClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = AccessPoliciesClient.DEFAULT_MTLS_ENDPOINT + _DEFAULT_ENDPOINT_TEMPLATE = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE + _DEFAULT_UNIVERSE = AccessPoliciesClient._DEFAULT_UNIVERSE + + access_policy_path = staticmethod(AccessPoliciesClient.access_policy_path) + parse_access_policy_path = staticmethod( + AccessPoliciesClient.parse_access_policy_path + ) + policy_binding_path = staticmethod(AccessPoliciesClient.policy_binding_path) + parse_policy_binding_path = staticmethod( + AccessPoliciesClient.parse_policy_binding_path + ) + common_billing_account_path = staticmethod( + AccessPoliciesClient.common_billing_account_path + ) + parse_common_billing_account_path = staticmethod( + AccessPoliciesClient.parse_common_billing_account_path + ) + common_folder_path = staticmethod(AccessPoliciesClient.common_folder_path) + parse_common_folder_path = staticmethod( + AccessPoliciesClient.parse_common_folder_path + ) + common_organization_path = staticmethod( + AccessPoliciesClient.common_organization_path + ) + parse_common_organization_path = staticmethod( + AccessPoliciesClient.parse_common_organization_path + ) + common_project_path = staticmethod(AccessPoliciesClient.common_project_path) + parse_common_project_path = staticmethod( + AccessPoliciesClient.parse_common_project_path + ) + common_location_path = staticmethod(AccessPoliciesClient.common_location_path) + parse_common_location_path = staticmethod( + AccessPoliciesClient.parse_common_location_path + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. 
+ args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccessPoliciesAsyncClient: The constructed client. + """ + sa_info_func = ( + AccessPoliciesClient.from_service_account_info.__func__ # type: ignore + ) + return sa_info_func(AccessPoliciesAsyncClient, info, *args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccessPoliciesAsyncClient: The constructed client. + """ + sa_file_func = ( + AccessPoliciesClient.from_service_account_file.__func__ # type: ignore + ) + return sa_file_func(AccessPoliciesAsyncClient, filename, *args, **kwargs) + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return AccessPoliciesClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> AccessPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + AccessPoliciesTransport: The transport used by the client instance. + """ + return self._client.transport + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._client._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. + + Returns: + str: The universe domain used + by the client instance. 
+ """ + return self._client._universe_domain + + get_transport_class = AccessPoliciesClient.get_transport_class + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AccessPoliciesTransport, Callable[..., AccessPoliciesTransport]] + ] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the access policies async client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccessPoliciesTransport,Callable[..., AccessPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport to use. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccessPoliciesTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = AccessPoliciesClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.iam_v3beta.AccessPoliciesAsyncClient`.", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "universeDomain": getattr( + self._client._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._client._transport._credentials).__module__}.{type(self._client._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._client._transport, "_credentials") + else { + "serviceName": "google.iam.v3beta.AccessPolicies", + "credentialsType": None, + }, + ) + + async def create_access_policy( + self, + request: Optional[ + 
Union[access_policies_service.CreateAccessPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + access_policy: Optional[access_policy_resources.AccessPolicy] = None, + access_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Creates an access policy, and returns a long running + operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_create_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + # Make the request + operation = client.create_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.CreateAccessPolicyRequest, dict]]): + The request object. Request message for + CreateAccessPolicy method. + parent (:class:`str`): + Required. The parent resource where this access policy + will be created. 
+ + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_policy (:class:`google.cloud.iam_v3beta.types.AccessPolicy`): + Required. The access policy to + create. + + This corresponds to the ``access_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_policy_id (:class:`str`): + Required. The ID to use for the access policy, which + will become the final component of the access policy's + resource name. + + This value must start with a lowercase letter followed + by up to 62 lowercase letters, numbers, hyphens, or + dots. Pattern, /[a-z][a-z0-9-.]{2,62}/. + + This value must be unique among all access policies with + the same parent. + + This corresponds to the ``access_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.iam_v3beta.types.AccessPolicy` An + IAM access policy resource. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, access_policy, access_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.CreateAccessPolicyRequest): + request = access_policies_service.CreateAccessPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if access_policy is not None: + request.access_policy = access_policy + if access_policy_id is not None: + request.access_policy_id = access_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.create_access_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + access_policy_resources.AccessPolicy, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def get_access_policy( + self, + request: Optional[ + Union[access_policies_service.GetAccessPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> access_policy_resources.AccessPolicy: + r"""Gets an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_get_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.GetAccessPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_access_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.GetAccessPolicyRequest, dict]]): + The request object. Request message for GetAccessPolicy + method. + name (:class:`str`): + Required. The name of the access policy to retrieve. + + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.types.AccessPolicy: + An IAM access policy resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.GetAccessPolicyRequest): + request = access_policies_service.GetAccessPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.get_access_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def update_access_policy( + self, + request: Optional[ + Union[access_policies_service.UpdateAccessPolicyRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Updates an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_update_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.UpdateAccessPolicyRequest( + ) + + # Make the request + operation = client.update_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.UpdateAccessPolicyRequest, dict]]): + The request object. Request message for + UpdateAccessPolicy method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. 
Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.iam_v3beta.types.AccessPolicy` An + IAM access policy resource. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.UpdateAccessPolicyRequest): + request = access_policies_service.UpdateAccessPolicyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.update_access_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("access_policy.name", request.access_policy.name),) + ), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + access_policy_resources.AccessPolicy, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. 
+ return response + + async def delete_access_policy( + self, + request: Optional[ + Union[access_policies_service.DeleteAccessPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation_async.AsyncOperation: + r"""Deletes an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_delete_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.DeleteAccessPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.DeleteAccessPolicyRequest, dict]]): + The request object. Request message for + DeleteAccessPolicy method. + name (:class:`str`): + Required. The name of the access policy to delete. 
+ + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation_async.AsyncOperation: + An object representing a long-running operation. + + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." 
+ ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.DeleteAccessPolicyRequest): + request = access_policies_service.DeleteAccessPolicyRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.delete_access_policy + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation_async.from_gapic( + response, + self._client._transport.operations_client, + empty_pb2.Empty, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. + return response + + async def list_access_policies( + self, + request: Optional[ + Union[access_policies_service.ListAccessPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAccessPoliciesAsyncPager: + r"""Lists access policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_list_access_policies(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.ListAccessPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_access_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.ListAccessPoliciesRequest, dict]]): + The request object. Request message for + ListAccessPolicies method. + parent (:class:`str`): + Required. The parent resource, which owns the collection + of access policy resources. + + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.services.access_policies.pagers.ListAccessPoliciesAsyncPager: + Response message for + ListAccessPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. 
+ # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.ListAccessPoliciesRequest): + request = access_policies_service.ListAccessPoliciesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.list_access_policies + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListAccessPoliciesAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def search_access_policy_bindings( + self, + request: Optional[ + Union[access_policies_service.SearchAccessPolicyBindingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAccessPolicyBindingsAsyncPager: + r"""Returns all policy bindings that bind a specific + policy if a user has searchPolicyBindings permission on + that policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + async def sample_search_access_policy_bindings(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.SearchAccessPolicyBindingsRequest( + name="name_value", + ) + + # Make the request + page_result = client.search_access_policy_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest, dict]]): + The request object. Request message for + SearchAccessPolicyBindings rpc. + name (:class:`str`): + Required. The name of the access policy. 
Format: + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.services.access_policies.pagers.SearchAccessPolicyBindingsAsyncPager: + Response message for + SearchAccessPolicyBindings rpc. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. 
+ if not isinstance( + request, access_policies_service.SearchAccessPolicyBindingsRequest + ): + request = access_policies_service.SearchAccessPolicyBindingsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.search_access_policy_bindings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.SearchAccessPolicyBindingsAsyncPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_operation( + self, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. 
+ metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self.transport._wrapped_methods[self._client._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = await rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def __aenter__(self) -> "AccessPoliciesAsyncClient": + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +__all__ = ("AccessPoliciesAsyncClient",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/client.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/client.py new file mode 100644 index 000000000000..2cdde1d795c1 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/client.py @@ -0,0 +1,1624 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import json +import logging as std_logging +import os +import re +import warnings +from collections import OrderedDict +from http import HTTPStatus +from typing import ( + Callable, + Dict, + Mapping, + MutableMapping, + MutableSequence, + Optional, + Sequence, + Tuple, + Type, + Union, + cast, +) + +import google.protobuf +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.iam_v3beta import gapic_version as package_version + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + +import google.api_core.operation as operation # type: ignore +import google.api_core.operation_async as operation_async # type: ignore +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore + +from google.cloud.iam_v3beta.services.access_policies import pagers +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, + operation_metadata, + policy_binding_resources, +) + 
+from .transports.base import DEFAULT_CLIENT_INFO, AccessPoliciesTransport +from .transports.grpc import AccessPoliciesGrpcTransport +from .transports.grpc_asyncio import AccessPoliciesGrpcAsyncIOTransport +from .transports.rest import AccessPoliciesRestTransport + + +class AccessPoliciesClientMeta(type): + """Metaclass for the AccessPolicies client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + + _transport_registry = OrderedDict() # type: Dict[str, Type[AccessPoliciesTransport]] + _transport_registry["grpc"] = AccessPoliciesGrpcTransport + _transport_registry["grpc_asyncio"] = AccessPoliciesGrpcAsyncIOTransport + _transport_registry["rest"] = AccessPoliciesRestTransport + + def get_transport_class( + cls, + label: Optional[str] = None, + ) -> Type[AccessPoliciesTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class AccessPoliciesClient(metaclass=AccessPoliciesClientMeta): + """Manages Identity and Access Management (IAM) access policies.""" + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint) -> Optional[str]: + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + Optional[str]: converted mTLS api endpoint. 
+ """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + if m is None: + # Could not parse api_endpoint; return as-is. + return api_endpoint + + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. + DEFAULT_ENDPOINT = "iam.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore + DEFAULT_ENDPOINT + ) + + _DEFAULT_ENDPOINT_TEMPLATE = "iam.{UNIVERSE_DOMAIN}" + _DEFAULT_UNIVERSE = "googleapis.com" + + @staticmethod + def _use_client_cert_effective(): + """Returns whether client certificate should be used for mTLS if the + google-auth version supports should_use_client_cert automatic mTLS enablement. + + Alternatively, read from the GOOGLE_API_USE_CLIENT_CERTIFICATE env var. + + Returns: + bool: whether client certificate should be used for mTLS + Raises: + ValueError: (If using a version of google-auth without should_use_client_cert and + GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.)
+ """ + # check if google-auth version supports should_use_client_cert for automatic mTLS enablement + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + return mtls.should_use_client_cert() + else: # pragma: NO COVER + # if unsupported, fallback to reading from env var + use_client_cert_str = os.getenv( + "GOOGLE_API_USE_CLIENT_CERTIFICATE", "false" + ).lower() + if use_client_cert_str not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be" + " either `true` or `false`" + ) + return use_client_cert_str == "true" + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccessPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + AccessPoliciesClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file(filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> AccessPoliciesTransport: + """Returns the transport used by the client instance. + + Returns: + AccessPoliciesTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def access_policy_path( + organization: str, + location: str, + access_policy: str, + ) -> str: + """Returns a fully-qualified access_policy string.""" + return "organizations/{organization}/locations/{location}/accessPolicies/{access_policy}".format( + organization=organization, + location=location, + access_policy=access_policy, + ) + + @staticmethod + def parse_access_policy_path(path: str) -> Dict[str, str]: + """Parses a access_policy path into its component segments.""" + m = re.match( + r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/accessPolicies/(?P<access_policy>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def policy_binding_path( + organization: str, + location: str, + policy_binding: str, + ) -> str: + """Returns a fully-qualified policy_binding string.""" + return "organizations/{organization}/locations/{location}/policyBindings/{policy_binding}".format( + organization=organization, + location=location, + policy_binding=policy_binding, + ) + + @staticmethod + def parse_policy_binding_path(path: str) -> Dict[str, str]: + """Parses a policy_binding path into its component segments.""" + m = re.match( + r"^organizations/(?P<organization>.+?)/locations/(?P<location>.+?)/policyBindings/(?P<policy_binding>.+?)$", + path, + ) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path( + billing_account: str, + ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str, str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path( + folder: str, + ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format( + folder=folder, + ) + + @staticmethod + def
parse_common_folder_path(path: str) -> Dict[str, str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path( + organization: str, + ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format( + organization=organization, + ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str, str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path( + project: str, + ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format( + project=project, + ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str, str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path( + project: str, + location: str, + ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str, str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Deprecated. Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None.
+ (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + + warnings.warn( + "get_mtls_endpoint_and_cert_source is deprecated. Use the api_endpoint property instead.", + DeprecationWarning, + ) + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = AccessPoliciesClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert: + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. 
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + @staticmethod + def _read_environment_variables(): + """Returns the environment variables used by the client. + + Returns: + Tuple[bool, str, str]: returns the GOOGLE_API_USE_CLIENT_CERTIFICATE, + GOOGLE_API_USE_MTLS_ENDPOINT, and GOOGLE_CLOUD_UNIVERSE_DOMAIN environment variables. + + Raises: + ValueError: If GOOGLE_API_USE_CLIENT_CERTIFICATE is not + any of ["true", "false"]. + google.auth.exceptions.MutualTLSChannelError: If GOOGLE_API_USE_MTLS_ENDPOINT + is not any of ["auto", "never", "always"]. + """ + use_client_cert = AccessPoliciesClient._use_client_cert_effective() + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto").lower() + universe_domain_env = os.getenv("GOOGLE_CLOUD_UNIVERSE_DOMAIN") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + return use_client_cert, use_mtls_endpoint, universe_domain_env + + @staticmethod + def _get_client_cert_source(provided_cert_source, use_cert_flag): + """Return the client cert source to be used by the client. + + Args: + provided_cert_source (bytes): The client certificate source provided. + use_cert_flag (bool): A flag indicating whether to use the client certificate. + + Returns: + bytes or None: The client cert source to be used by the client. 
+ """ + client_cert_source = None + if use_cert_flag: + if provided_cert_source: + client_cert_source = provided_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + return client_cert_source + + @staticmethod + def _get_api_endpoint( + api_override, client_cert_source, universe_domain, use_mtls_endpoint + ) -> str: + """Return the API endpoint used by the client. + + Args: + api_override (str): The API endpoint override. If specified, this is always + the return value of this function and the other arguments are not used. + client_cert_source (bytes): The client certificate source used by the client. + universe_domain (str): The universe domain used by the client. + use_mtls_endpoint (str): How to use the mTLS endpoint, which depends also on the other parameters. + Possible values are "always", "auto", or "never". + + Returns: + str: The API endpoint to be used by the client. + """ + if api_override is not None: + api_endpoint = api_override + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + _default_universe = AccessPoliciesClient._DEFAULT_UNIVERSE + if universe_domain != _default_universe: + raise MutualTLSChannelError( + f"mTLS is not supported in any universe other than {_default_universe}." + ) + api_endpoint = AccessPoliciesClient.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=universe_domain + ) + return api_endpoint + + @staticmethod + def _get_universe_domain( + client_universe_domain: Optional[str], universe_domain_env: Optional[str] + ) -> str: + """Return the universe domain used by the client. + + Args: + client_universe_domain (Optional[str]): The universe domain configured via the client options. + universe_domain_env (Optional[str]): The universe domain configured via the "GOOGLE_CLOUD_UNIVERSE_DOMAIN" environment variable. 
+ + Returns: + str: The universe domain to be used by the client. + + Raises: + ValueError: If the universe domain is an empty string. + """ + universe_domain = AccessPoliciesClient._DEFAULT_UNIVERSE + if client_universe_domain is not None: + universe_domain = client_universe_domain + elif universe_domain_env is not None: + universe_domain = universe_domain_env + if len(universe_domain.strip()) == 0: + raise ValueError("Universe Domain cannot be an empty string.") + return universe_domain + + def _validate_universe_domain(self): + """Validates client's and credentials' universe domains are consistent. + + Returns: + bool: True iff the configured universe domain is valid. + + Raises: + ValueError: If the configured universe domain is not valid. + """ + + # NOTE (b/349488459): universe validation is disabled until further notice. + return True + + def _add_cred_info_for_auth_errors( + self, error: core_exceptions.GoogleAPICallError + ) -> None: + """Adds credential info string to error details for 401/403/404 errors. + + Args: + error (google.api_core.exceptions.GoogleAPICallError): The error to add the cred info. + """ + if error.code not in [ + HTTPStatus.UNAUTHORIZED, + HTTPStatus.FORBIDDEN, + HTTPStatus.NOT_FOUND, + ]: + return + + cred = self._transport._credentials + + # get_cred_info is only available in google-auth>=2.35.0 + if not hasattr(cred, "get_cred_info"): + return + + # ignore the type check since pypy test fails when get_cred_info + # is not available + cred_info = cred.get_cred_info() # type: ignore + if cred_info and hasattr(error._details, "append"): + error._details.append(json.dumps(cred_info)) + + @property + def api_endpoint(self) -> str: + """Return the API endpoint used by the client instance. + + Returns: + str: The API endpoint used by the client instance. + """ + return self._api_endpoint + + @property + def universe_domain(self) -> str: + """Return the universe domain used by the client instance. 
+ + Returns: + str: The universe domain used by the client instance. + """ + return self._universe_domain + + def __init__( + self, + *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[ + Union[str, AccessPoliciesTransport, Callable[..., AccessPoliciesTransport]] + ] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the access policies client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Optional[Union[str,AccessPoliciesTransport,Callable[..., AccessPoliciesTransport]]]): + The transport to use, or a Callable that constructs and returns a new transport. + If a Callable is given, it will be called with the same set of initialization + arguments as used in the AccessPoliciesTransport constructor. + If set to None, a transport is chosen automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): + Custom options for the client. + + 1. The ``api_endpoint`` property can be used to override the + default endpoint provided by the client when ``transport`` is + not explicitly provided. Only if this property is not set and + ``transport`` was not explicitly provided, the endpoint is + determined by the GOOGLE_API_USE_MTLS_ENDPOINT environment + variable, which have one of the following values: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto-switch to the + default mTLS endpoint if client certificate is present; this is + the default value). + + 2. 
If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide a client certificate for mTLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + 3. The ``universe_domain`` property can be used to override the + default "googleapis.com" universe. Note that the ``api_endpoint`` + property still takes precedence; and ``universe_domain`` is + currently not supported for mTLS. + + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client_options = client_options + if isinstance(self._client_options, dict): + self._client_options = client_options_lib.from_dict(self._client_options) + if self._client_options is None: + self._client_options = client_options_lib.ClientOptions() + self._client_options = cast( + client_options_lib.ClientOptions, self._client_options + ) + + universe_domain_opt = getattr(self._client_options, "universe_domain", None) + + self._use_client_cert, self._use_mtls_endpoint, self._universe_domain_env = ( + AccessPoliciesClient._read_environment_variables() + ) + self._client_cert_source = AccessPoliciesClient._get_client_cert_source( + self._client_options.client_cert_source, self._use_client_cert + ) + self._universe_domain = AccessPoliciesClient._get_universe_domain( + universe_domain_opt, self._universe_domain_env + ) + self._api_endpoint: str = "" # updated below, depending on `transport` + + # Initialize the universe domain validation. 
+ self._is_universe_domain_valid = False + + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + # Setup logging. + client_logging.initialize_logging() + + api_key_value = getattr(self._client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + transport_provided = isinstance(transport, AccessPoliciesTransport) + if transport_provided: + # transport is a AccessPoliciesTransport instance. + if credentials or self._client_options.credentials_file or api_key_value: + raise ValueError( + "When providing a transport instance, " + "provide its credentials directly." + ) + if self._client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes directly." + ) + self._transport = cast(AccessPoliciesTransport, transport) + self._api_endpoint = self._transport.host + + self._api_endpoint = ( + self._api_endpoint + or AccessPoliciesClient._get_api_endpoint( + self._client_options.api_endpoint, + self._client_cert_source, + self._universe_domain, + self._use_mtls_endpoint, + ) + ) + + if not transport_provided: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + + transport_init: Union[ + Type[AccessPoliciesTransport], Callable[..., AccessPoliciesTransport] + ] = ( + AccessPoliciesClient.get_transport_class(transport) + if isinstance(transport, str) or transport is None + else cast(Callable[..., AccessPoliciesTransport], transport) + ) + # initialize with the provided callable or the passed in class + self._transport = transport_init( + credentials=credentials, + 
credentials_file=self._client_options.credentials_file, + host=self._api_endpoint, + scopes=self._client_options.scopes, + client_cert_source_for_mtls=self._client_cert_source, + quota_project_id=self._client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=self._client_options.api_audience, + ) + + if "async" not in str(self._transport): + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ): # pragma: NO COVER + _LOGGER.debug( + "Created client `google.iam_v3beta.AccessPoliciesClient`.", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "universeDomain": getattr( + self._transport._credentials, "universe_domain", "" + ), + "credentialsType": f"{type(self._transport._credentials).__module__}.{type(self._transport._credentials).__qualname__}", + "credentialsInfo": getattr( + self.transport._credentials, "get_cred_info", lambda: None + )(), + } + if hasattr(self._transport, "_credentials") + else { + "serviceName": "google.iam.v3beta.AccessPolicies", + "credentialsType": None, + }, + ) + + def create_access_policy( + self, + request: Optional[ + Union[access_policies_service.CreateAccessPolicyRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + access_policy: Optional[access_policy_resources.AccessPolicy] = None, + access_policy_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Creates an access policy, and returns a long running + operation. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_create_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + # Make the request + operation = client.create_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.CreateAccessPolicyRequest, dict]): + The request object. Request message for + CreateAccessPolicy method. + parent (str): + Required. The parent resource where this access policy + will be created. + + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_policy (google.cloud.iam_v3beta.types.AccessPolicy): + Required. The access policy to + create. + + This corresponds to the ``access_policy`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + access_policy_id (str): + Required. The ID to use for the access policy, which + will become the final component of the access policy's + resource name. + + This value must start with a lowercase letter followed + by up to 62 lowercase letters, numbers, hyphens, or + dots. Pattern, /[a-z][a-z0-9-.]{2,62}/. + + This value must be unique among all access policies with + the same parent. 
+ + This corresponds to the ``access_policy_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.iam_v3beta.types.AccessPolicy` An + IAM access policy resource. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent, access_policy, access_policy_id] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.CreateAccessPolicyRequest): + request = access_policies_service.CreateAccessPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if access_policy is not None: + request.access_policy = access_policy + if access_policy_id is not None: + request.access_policy_id = access_policy_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_access_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + access_policy_resources.AccessPolicy, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. + return response + + def get_access_policy( + self, + request: Optional[ + Union[access_policies_service.GetAccessPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> access_policy_resources.AccessPolicy: + r"""Gets an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_get_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.GetAccessPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_access_policy(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.GetAccessPolicyRequest, dict]): + The request object. Request message for GetAccessPolicy + method. + name (str): + Required. The name of the access policy to retrieve. + + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.types.AccessPolicy: + An IAM access policy resource. + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.GetAccessPolicyRequest): + request = access_policies_service.GetAccessPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_access_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def update_access_policy( + self, + request: Optional[ + Union[access_policies_service.UpdateAccessPolicyRequest, dict] + ] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Updates an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_update_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.UpdateAccessPolicyRequest( + ) + + # Make the request + operation = client.update_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.UpdateAccessPolicyRequest, dict]): + The request object. Request message for + UpdateAccessPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. + + The result type for the operation will be + :class:`google.cloud.iam_v3beta.types.AccessPolicy` An + IAM access policy resource. + + """ + # Create or coerce a protobuf request object. + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.UpdateAccessPolicyRequest): + request = access_policies_service.UpdateAccessPolicyRequest(request) + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.update_access_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata( + (("access_policy.name", request.access_policy.name),) + ), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. + response = operation.from_gapic( + response, + self._transport.operations_client, + access_policy_resources.AccessPolicy, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. + return response + + def delete_access_policy( + self, + request: Optional[ + Union[access_policies_service.DeleteAccessPolicyRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operation.Operation: + r"""Deletes an access policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_delete_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.DeleteAccessPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.DeleteAccessPolicyRequest, dict]): + The request object. Request message for + DeleteAccessPolicy method. + name (str): + Required. The name of the access policy to delete. + + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.api_core.operation.Operation: + An object representing a long-running operation. 
+ + The result type for the operation will be :class:`google.protobuf.empty_pb2.Empty` A generic empty message that you can re-use to avoid defining duplicated + empty messages in your APIs. A typical example is to + use it as the request or the response type of an API + method. For instance: + + service Foo { + rpc Bar(google.protobuf.Empty) returns + (google.protobuf.Empty); + + } + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.DeleteAccessPolicyRequest): + request = access_policies_service.DeleteAccessPolicyRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_access_policy] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Wrap the response in an operation future. 
+ response = operation.from_gapic( + response, + self._transport.operations_client, + empty_pb2.Empty, + metadata_type=operation_metadata.OperationMetadata, + ) + + # Done; return the response. + return response + + def list_access_policies( + self, + request: Optional[ + Union[access_policies_service.ListAccessPoliciesRequest, dict] + ] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.ListAccessPoliciesPager: + r"""Lists access policies. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_list_access_policies(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.ListAccessPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_access_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.ListAccessPoliciesRequest, dict]): + The request object. Request message for + ListAccessPolicies method. + parent (str): + Required. The parent resource, which owns the collection + of access policy resources. 
+ + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.services.access_policies.pagers.ListAccessPoliciesPager: + Response message for + ListAccessPolicies method. + Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [parent] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance(request, access_policies_service.ListAccessPoliciesRequest): + request = access_policies_service.ListAccessPoliciesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_access_policies] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListAccessPoliciesPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def search_access_policy_bindings( + self, + request: Optional[ + Union[access_policies_service.SearchAccessPolicyBindingsRequest, dict] + ] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> pagers.SearchAccessPolicyBindingsPager: + r"""Returns all policy bindings that bind a specific + policy if a user has searchPolicyBindings permission on + that policy. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import iam_v3beta + + def sample_search_access_policy_bindings(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.SearchAccessPolicyBindingsRequest( + name="name_value", + ) + + # Make the request + page_result = client.search_access_policy_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest, dict]): + The request object. Request message for + SearchAccessPolicyBindings rpc. + name (str): + Required. The name of the access policy. Format: + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + google.cloud.iam_v3beta.services.access_policies.pagers.SearchAccessPolicyBindingsPager: + Response message for + SearchAccessPolicyBindings rpc. 
+ Iterating over this object will yield + results and resolve additional pages + automatically. + + """ + # Create or coerce a protobuf request object. + # - Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + flattened_params = [name] + has_flattened_params = ( + len([param for param in flattened_params if param is not None]) > 0 + ) + if request is not None and has_flattened_params: + raise ValueError( + "If the `request` argument is set, then none of " + "the individual field arguments should be set." + ) + + # - Use the request object if provided (there's no risk of modifying the input as + # there are no flattened fields), or create one. + if not isinstance( + request, access_policies_service.SearchAccessPolicyBindingsRequest + ): + request = access_policies_service.SearchAccessPolicyBindingsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.search_access_policy_bindings + ] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.SearchAccessPolicyBindingsPager( + method=rpc, + request=request, + response=response, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def __enter__(self) -> "AccessPoliciesClient": + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + def get_operation( + self, + request: Optional[Union[operations_pb2.GetOperationRequest, dict]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Gets the latest state of a long-running operation. + + Args: + request (:class:`~.operations_pb2.GetOperationRequest`): + The request object. Request message for + `GetOperation` method. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + Returns: + ~.operations_pb2.Operation: + An ``Operation`` object. + """ + # Create or coerce a protobuf request object. + # The request isn't a proto-plus wrapped type, + # so it must be constructed via keyword expansion. + if request is None: + request_pb = operations_pb2.GetOperationRequest() + elif isinstance(request, dict): + request_pb = operations_pb2.GetOperationRequest(**request) + else: + request_pb = request + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = self._transport._wrapped_methods[self._transport.get_operation] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", request_pb.name),)), + ) + + # Validate the universe domain. + self._validate_universe_domain() + + try: + # Send the request. + response = rpc( + request_pb, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + except core_exceptions.GoogleAPICallError as e: + self._add_cred_info_for_auth_errors(e) + raise e + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=package_version.__version__ +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + +__all__ = ("AccessPoliciesClient",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/pagers.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/pagers.py new file mode 100644 index 000000000000..6dcbc0f5636c --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/pagers.py @@ -0,0 +1,373 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from typing import ( + Any, + AsyncIterator, + Awaitable, + Callable, + Iterator, + Optional, + Sequence, + Tuple, + Union, +) + +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.api_core import retry_async as retries_async + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] + OptionalAsyncRetry = Union[ + retries_async.AsyncRetry, gapic_v1.method._MethodDefault, None + ] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + OptionalAsyncRetry = Union[retries_async.AsyncRetry, object, None] # type: ignore + +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, + policy_binding_resources, +) + + +class ListAccessPoliciesPager: + """A pager for iterating through ``list_access_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iam_v3beta.types.ListAccessPoliciesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``access_policies`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListAccessPolicies`` requests and continue to iterate + through the ``access_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iam_v3beta.types.ListAccessPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[..., access_policies_service.ListAccessPoliciesResponse], + request: access_policies_service.ListAccessPoliciesRequest, + response: access_policies_service.ListAccessPoliciesResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. 
+ + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iam_v3beta.types.ListAccessPoliciesRequest): + The initial request object. + response (google.cloud.iam_v3beta.types.ListAccessPoliciesResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + """ + self._method = method + self._request = access_policies_service.ListAccessPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[access_policies_service.ListAccessPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[access_policy_resources.AccessPolicy]: + for page in self.pages: + yield from page.access_policies + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class ListAccessPoliciesAsyncPager: + """A pager for iterating through ``list_access_policies`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iam_v3beta.types.ListAccessPoliciesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``access_policies`` field. 
+ + If there are more pages, the ``__aiter__`` method will make additional + ``ListAccessPolicies`` requests and continue to iterate + through the ``access_policies`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iam_v3beta.types.ListAccessPoliciesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[access_policies_service.ListAccessPoliciesResponse] + ], + request: access_policies_service.ListAccessPoliciesRequest, + response: access_policies_service.ListAccessPoliciesResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iam_v3beta.types.ListAccessPoliciesRequest): + The initial request object. + response (google.cloud.iam_v3beta.types.ListAccessPoliciesResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = access_policies_service.ListAccessPoliciesRequest(request) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[access_policies_service.ListAccessPoliciesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[access_policy_resources.AccessPolicy]: + async def async_generator(): + async for page in self.pages: + for response in page.access_policies: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAccessPolicyBindingsPager: + """A pager for iterating through ``search_access_policy_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``policy_bindings`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``SearchAccessPolicyBindings`` requests and continue to iterate + through the ``policy_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., access_policies_service.SearchAccessPolicyBindingsResponse + ], + request: access_policies_service.SearchAccessPolicyBindingsRequest, + response: access_policies_service.SearchAccessPolicyBindingsResponse, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest): + The initial request object. + response (google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse): + The initial response object. + retry (google.api_core.retry.Retry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = access_policies_service.SearchAccessPolicyBindingsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages( + self, + ) -> Iterator[access_policies_service.SearchAccessPolicyBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __iter__(self) -> Iterator[policy_binding_resources.PolicyBinding]: + for page in self.pages: + yield from page.policy_bindings + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) + + +class SearchAccessPolicyBindingsAsyncPager: + """A pager for iterating through ``search_access_policy_bindings`` requests. + + This class thinly wraps an initial + :class:`google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``policy_bindings`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``SearchAccessPolicyBindings`` requests and continue to iterate + through the ``policy_bindings`` field on the + corresponding responses. + + All the usual :class:`google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + + def __init__( + self, + method: Callable[ + ..., Awaitable[access_policies_service.SearchAccessPolicyBindingsResponse] + ], + request: access_policies_service.SearchAccessPolicyBindingsRequest, + response: access_policies_service.SearchAccessPolicyBindingsResponse, + *, + retry: OptionalAsyncRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest): + The initial request object. + response (google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsResponse): + The initial response object. + retry (google.api_core.retry.AsyncRetry): Designation of what errors, + if any, should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. 
+ """ + self._method = method + self._request = access_policies_service.SearchAccessPolicyBindingsRequest( + request + ) + self._response = response + self._retry = retry + self._timeout = timeout + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages( + self, + ) -> AsyncIterator[access_policies_service.SearchAccessPolicyBindingsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method( + self._request, + retry=self._retry, + timeout=self._timeout, + metadata=self._metadata, + ) + yield self._response + + def __aiter__(self) -> AsyncIterator[policy_binding_resources.PolicyBinding]: + async def async_generator(): + async for page in self.pages: + for response in page.policy_bindings: + yield response + + return async_generator() + + def __repr__(self) -> str: + return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/README.rst b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/README.rst new file mode 100644 index 000000000000..a69bcd03a6eb --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/README.rst @@ -0,0 +1,10 @@ + +transport inheritance structure +_______________________________ + +``AccessPoliciesTransport`` is the ABC for all transports. + +- public child ``AccessPoliciesGrpcTransport`` for sync gRPC transport (defined in ``grpc.py``). +- public child ``AccessPoliciesGrpcAsyncIOTransport`` for async gRPC transport (defined in ``grpc_asyncio.py``). +- private child ``_BaseAccessPoliciesRestTransport`` for base REST transport with inner classes ``_BaseMETHOD`` (defined in ``rest_base.py``). 
# -*- coding: utf-8 -*-
# Copyright 2025 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import AccessPoliciesTransport
from .grpc import AccessPoliciesGrpcTransport
from .grpc_asyncio import AccessPoliciesGrpcAsyncIOTransport
from .rest import AccessPoliciesRestInterceptor, AccessPoliciesRestTransport

# Registry mapping each transport name ("grpc", "grpc_asyncio", "rest") to
# its implementation class; insertion order is preserved deliberately.
_transport_registry: Dict[str, Type[AccessPoliciesTransport]] = OrderedDict(
    (
        ("grpc", AccessPoliciesGrpcTransport),
        ("grpc_asyncio", AccessPoliciesGrpcAsyncIOTransport),
        ("rest", AccessPoliciesRestTransport),
    )
)

__all__ = (
    "AccessPoliciesTransport",
    "AccessPoliciesGrpcTransport",
    "AccessPoliciesGrpcAsyncIOTransport",
    "AccessPoliciesRestTransport",
    "AccessPoliciesRestInterceptor",
)
#
import abc
from typing import Awaitable, Callable, Dict, Optional, Sequence, Union

import google.api_core
import google.auth  # type: ignore
import google.protobuf
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1, operations_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.oauth2 import service_account  # type: ignore

from google.cloud.iam_v3beta import gapic_version as package_version
from google.cloud.iam_v3beta.types import (
    access_policies_service,
    access_policy_resources,
)

# Default client metadata (user-agent) stamped with this package's version.
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
    gapic_version=package_version.__version__
)

if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"):  # pragma: NO COVER
    DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__


class AccessPoliciesTransport(abc.ABC):
    """Abstract transport class for AccessPolicies.

    Concrete subclasses (gRPC, gRPC-asyncio, REST) implement the RPC
    properties declared below; this base class only resolves credentials
    and precomputes retry/timeout-wrapped method callables.
    """

    AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)

    DEFAULT_HOST: str = "iam.googleapis.com"

    def __init__(
        self,
        *,
        host: str = DEFAULT_HOST,
        credentials: Optional[ga_credentials.Credentials] = None,
        credentials_file: Optional[str] = None,
        scopes: Optional[Sequence[str]] = None,
        quota_project_id: Optional[str] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
        always_use_jwt_access: Optional[bool] = False,
        api_audience: Optional[str] = None,
        **kwargs,
    ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                The hostname to connect to (default: 'iam.googleapis.com').
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            credentials_file (Optional[str]): Deprecated. A file with credentials that can
                be loaded with :func:`google.auth.load_credentials_from_file`.
                This argument is mutually exclusive with credentials. This argument will be
                removed in the next major version of this library.
            scopes (Optional[Sequence[str]]): A list of scopes.
            quota_project_id (Optional[str]): An optional project to use for billing
                and quota.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
            always_use_jwt_access (Optional[bool]): Whether self signed JWT should
                be used for service account credentials.
            api_audience (Optional[str]): The intended audience for the API calls
                to the service that will be set when using certain 3rd party
                authentication flows. Audience is typically a resource identifier.
                If not set, the host value will be used as a default.

        Raises:
            google.api_core.exceptions.DuplicateCredentialArgs: If both
                ``credentials`` and ``credentials_file`` are supplied.
        """

        # Save the scopes.
        self._scopes = scopes
        # Subclasses may set _ignore_credentials before calling
        # super().__init__ (e.g. when a pre-built gRPC channel already
        # carries credentials); default to resolving them here.
        if not hasattr(self, "_ignore_credentials"):
            self._ignore_credentials: bool = False

        # If no credentials are provided, then determine the appropriate
        # defaults.
        if credentials and credentials_file:
            raise core_exceptions.DuplicateCredentialArgs(
                "'credentials_file' and 'credentials' are mutually exclusive"
            )

        if credentials_file is not None:
            credentials, _ = google.auth.load_credentials_from_file(
                credentials_file,
                scopes=scopes,
                quota_project_id=quota_project_id,
                default_scopes=self.AUTH_SCOPES,
            )
        elif credentials is None and not self._ignore_credentials:
            credentials, _ = google.auth.default(
                scopes=scopes,
                quota_project_id=quota_project_id,
                default_scopes=self.AUTH_SCOPES,
            )
            # Don't apply audience if the credentials file passed from user.
            if hasattr(credentials, "with_gdch_audience"):
                credentials = credentials.with_gdch_audience(
                    api_audience if api_audience else host
                )

        # If the credentials are service account credentials, then always try to use self signed JWT.
        if (
            always_use_jwt_access
            and isinstance(credentials, service_account.Credentials)
            and hasattr(service_account.Credentials, "with_always_use_jwt_access")
        ):
            credentials = credentials.with_always_use_jwt_access(True)

        # Save the credentials.
        self._credentials = credentials

        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        if ":" not in host:
            host += ":443"
        self._host = host

        # Populated later by _prep_wrapped_messages().
        self._wrapped_methods: Dict[Callable, Callable] = {}

    @property
    def host(self):
        # The resolved "host:port" string this transport connects to.
        return self._host

    def _prep_wrapped_messages(self, client_info):
        # Precompute the wrapped methods.
        # Read-only RPCs (get/list/search) retry on ServiceUnavailable with
        # exponential backoff; mutating RPCs are not retried by default.
        self._wrapped_methods = {
            self.create_access_policy: gapic_v1.method.wrap_method(
                self.create_access_policy,
                default_timeout=30.0,
                client_info=client_info,
            ),
            self.get_access_policy: gapic_v1.method.wrap_method(
                self.get_access_policy,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.update_access_policy: gapic_v1.method.wrap_method(
                self.update_access_policy,
                default_timeout=30.0,
                client_info=client_info,
            ),
            self.delete_access_policy: gapic_v1.method.wrap_method(
                self.delete_access_policy,
                default_timeout=30.0,
                client_info=client_info,
            ),
            self.list_access_policies: gapic_v1.method.wrap_method(
                self.list_access_policies,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.search_access_policy_bindings: gapic_v1.method.wrap_method(
                self.search_access_policy_bindings,
                default_retry=retries.Retry(
                    initial=1.0,
                    maximum=10.0,
                    multiplier=1.3,
                    predicate=retries.if_exception_type(
                        core_exceptions.ServiceUnavailable,
                    ),
                    deadline=60.0,
                ),
                default_timeout=60.0,
                client_info=client_info,
            ),
            self.get_operation: gapic_v1.method.wrap_method(
                self.get_operation,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    def close(self):
        """Closes resources associated with the transport.

        .. warning::
             Only call this method if the transport is NOT shared
             with other clients - this may cause errors in other clients!
        """
        raise NotImplementedError()

    @property
    def operations_client(self):
        """Return the client designed to process long-running operations."""
        raise NotImplementedError()

    # Each property below is an abstract RPC hook: subclasses return a
    # callable (sync or awaitable) that performs the wire call.

    @property
    def create_access_policy(
        self,
    ) -> Callable[
        [access_policies_service.CreateAccessPolicyRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def get_access_policy(
        self,
    ) -> Callable[
        [access_policies_service.GetAccessPolicyRequest],
        Union[
            access_policy_resources.AccessPolicy,
            Awaitable[access_policy_resources.AccessPolicy],
        ],
    ]:
        raise NotImplementedError()

    @property
    def update_access_policy(
        self,
    ) -> Callable[
        [access_policies_service.UpdateAccessPolicyRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def delete_access_policy(
        self,
    ) -> Callable[
        [access_policies_service.DeleteAccessPolicyRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def list_access_policies(
        self,
    ) -> Callable[
        [access_policies_service.ListAccessPoliciesRequest],
        Union[
            access_policies_service.ListAccessPoliciesResponse,
            Awaitable[access_policies_service.ListAccessPoliciesResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def search_access_policy_bindings(
        self,
    ) -> Callable[
        [access_policies_service.SearchAccessPolicyBindingsRequest],
        Union[
            access_policies_service.SearchAccessPolicyBindingsResponse,
            Awaitable[access_policies_service.SearchAccessPolicyBindingsResponse],
        ],
    ]:
        raise NotImplementedError()

    @property
    def get_operation(
        self,
    ) -> Callable[
        [operations_pb2.GetOperationRequest],
        Union[operations_pb2.Operation, Awaitable[operations_pb2.Operation]],
    ]:
        raise NotImplementedError()

    @property
    def kind(self) -> str:
        # Short transport identifier, e.g. "grpc" or "rest".
        raise NotImplementedError()


__all__ = ("AccessPoliciesTransport",)
#
import json
import logging as std_logging
import pickle
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union

import google.auth  # type: ignore
import google.protobuf.message
import grpc  # type: ignore
import proto  # type: ignore
from google.api_core import gapic_v1, grpc_helpers, operations_v1
from google.auth import credentials as ga_credentials  # type: ignore
from google.auth.transport.grpc import SslCredentials  # type: ignore
from google.cloud.location import locations_pb2  # type: ignore
from google.longrunning import operations_pb2  # type: ignore
from google.protobuf.json_format import MessageToJson

from google.cloud.iam_v3beta.types import (
    access_policies_service,
    access_policy_resources,
)

from .base import DEFAULT_CLIENT_INFO, AccessPoliciesTransport

try:
    from google.api_core import client_logging  # type: ignore

    CLIENT_LOGGING_SUPPORTED = True  # pragma: NO COVER
except ImportError:  # pragma: NO COVER
    CLIENT_LOGGING_SUPPORTED = False

_LOGGER = std_logging.getLogger(__name__)


class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor):  # pragma: NO COVER
    """Client-side interceptor that DEBUG-logs each unary-unary request and
    response (payload, metadata, method) when client logging is enabled."""

    def intercept_unary_unary(self, continuation, client_call_details, request):
        # Logging is active only when google.api_core.client_logging is
        # importable AND this logger is enabled for DEBUG.
        logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
            std_logging.DEBUG
        )
        if logging_enabled:  # pragma: NO COVER
            request_metadata = client_call_details.metadata
            # Serialize the request to JSON when it is a known message type;
            # otherwise fall back to a pickled repr for the log payload.
            if isinstance(request, proto.Message):
                request_payload = type(request).to_json(request)
            elif isinstance(request, google.protobuf.message.Message):
                request_payload = MessageToJson(request)
            else:
                request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}"

            # Decode any bytes-valued metadata so the log record is text-safe.
            request_metadata = {
                key: value.decode("utf-8") if isinstance(value, bytes) else value
                for key, value in request_metadata
            }
            grpc_request = {
                "payload": request_payload,
                "requestMethod": "grpc",
                "metadata": dict(request_metadata),
            }
            _LOGGER.debug(
                f"Sending request for {client_call_details.method}",
                extra={
                    "serviceName": "google.iam.v3beta.AccessPolicies",
                    "rpcName": str(client_call_details.method),
                    "request": grpc_request,
                    "metadata": grpc_request["metadata"],
                },
            )
        response = continuation(client_call_details, request)
        if logging_enabled:  # pragma: NO COVER
            response_metadata = response.trailing_metadata()
            # Convert the gRPC trailing metadata into a dict of strings
            # (or None when no trailing metadata was returned).
            metadata = (
                dict([(k, str(v)) for k, v in response_metadata])
                if response_metadata
                else None
            )
            result = response.result()
            if isinstance(result, proto.Message):
                response_payload = type(result).to_json(result)
            elif isinstance(result, google.protobuf.message.Message):
                response_payload = MessageToJson(result)
            else:
                response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}"
            grpc_response = {
                "payload": response_payload,
                "metadata": metadata,
                "status": "OK",
            }
            _LOGGER.debug(
                f"Received response for {client_call_details.method}.",
                extra={
                    "serviceName": "google.iam.v3beta.AccessPolicies",
                    "rpcName": client_call_details.method,
                    "response": grpc_response,
                    "metadata": grpc_response["metadata"],
                },
            )
        # Always return the (possibly logged) call/future unchanged.
        return response
+ """ + + _stubs: Dict[str, Callable] + + def __init__( + self, + *, + host: str = "iam.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'iam.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if a ``channel`` instance is provided. + channel (Optional[Union[grpc.Channel, Callable[..., grpc.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. 
If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. 
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, grpc.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + 
credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientInterceptor() + self._logged_channel = grpc.intercept_channel( + self._grpc_channel, self._interceptor + ) + + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel( + cls, + host: str = "iam.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. 
+ Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service.""" + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_access_policy( + self, + ) -> Callable[ + [access_policies_service.CreateAccessPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the create access policy method over gRPC. + + Creates an access policy, and returns a long running + operation. + + Returns: + Callable[[~.CreateAccessPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "create_access_policy" not in self._stubs: + self._stubs["create_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/CreateAccessPolicy", + request_serializer=access_policies_service.CreateAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_access_policy"] + + @property + def get_access_policy( + self, + ) -> Callable[ + [access_policies_service.GetAccessPolicyRequest], + access_policy_resources.AccessPolicy, + ]: + r"""Return a callable for the get access policy method over gRPC. + + Gets an access policy. + + Returns: + Callable[[~.GetAccessPolicyRequest], + ~.AccessPolicy]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_access_policy" not in self._stubs: + self._stubs["get_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/GetAccessPolicy", + request_serializer=access_policies_service.GetAccessPolicyRequest.serialize, + response_deserializer=access_policy_resources.AccessPolicy.deserialize, + ) + return self._stubs["get_access_policy"] + + @property + def update_access_policy( + self, + ) -> Callable[ + [access_policies_service.UpdateAccessPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the update access policy method over gRPC. + + Updates an access policy. + + Returns: + Callable[[~.UpdateAccessPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "update_access_policy" not in self._stubs: + self._stubs["update_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/UpdateAccessPolicy", + request_serializer=access_policies_service.UpdateAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_access_policy"] + + @property + def delete_access_policy( + self, + ) -> Callable[ + [access_policies_service.DeleteAccessPolicyRequest], operations_pb2.Operation + ]: + r"""Return a callable for the delete access policy method over gRPC. + + Deletes an access policy. + + Returns: + Callable[[~.DeleteAccessPolicyRequest], + ~.Operation]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_access_policy" not in self._stubs: + self._stubs["delete_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/DeleteAccessPolicy", + request_serializer=access_policies_service.DeleteAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_access_policy"] + + @property + def list_access_policies( + self, + ) -> Callable[ + [access_policies_service.ListAccessPoliciesRequest], + access_policies_service.ListAccessPoliciesResponse, + ]: + r"""Return a callable for the list access policies method over gRPC. + + Lists access policies. + + Returns: + Callable[[~.ListAccessPoliciesRequest], + ~.ListAccessPoliciesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_access_policies" not in self._stubs: + self._stubs["list_access_policies"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/ListAccessPolicies", + request_serializer=access_policies_service.ListAccessPoliciesRequest.serialize, + response_deserializer=access_policies_service.ListAccessPoliciesResponse.deserialize, + ) + return self._stubs["list_access_policies"] + + @property + def search_access_policy_bindings( + self, + ) -> Callable[ + [access_policies_service.SearchAccessPolicyBindingsRequest], + access_policies_service.SearchAccessPolicyBindingsResponse, + ]: + r"""Return a callable for the search access policy bindings method over gRPC. + + Returns all policy bindings that bind a specific + policy if a user has searchPolicyBindings permission on + that policy. + + Returns: + Callable[[~.SearchAccessPolicyBindingsRequest], + ~.SearchAccessPolicyBindingsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "search_access_policy_bindings" not in self._stubs: + self._stubs["search_access_policy_bindings"] = ( + self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/SearchAccessPolicyBindings", + request_serializer=access_policies_service.SearchAccessPolicyBindingsRequest.serialize, + response_deserializer=access_policies_service.SearchAccessPolicyBindingsResponse.deserialize, + ) + ) + return self._stubs["search_access_policy_bindings"] + + def close(self): + self._logged_channel.close() + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ("AccessPoliciesGrpcTransport",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/grpc_asyncio.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/grpc_asyncio.py new file mode 100644 index 000000000000..6cc83d6b8be6 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/grpc_asyncio.py @@ -0,0 +1,634 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import inspect +import json +import logging as std_logging +import pickle +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +import google.protobuf.message +import grpc # type: ignore +import proto # type: ignore +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, grpc_helpers_async, operations_v1 +from google.api_core import retry_async as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf.json_format import MessageToJson +from grpc.experimental import aio # type: ignore + +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, +) + +from .base import DEFAULT_CLIENT_INFO, AccessPoliciesTransport +from .grpc import AccessPoliciesGrpcTransport + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = std_logging.getLogger(__name__) + + +class _LoggingClientAIOInterceptor( + grpc.aio.UnaryUnaryClientInterceptor +): # pragma: NO COVER + async def intercept_unary_unary(self, continuation, client_call_details, request): + logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + std_logging.DEBUG + ) + if logging_enabled: # pragma: NO COVER + request_metadata = client_call_details.metadata + if isinstance(request, proto.Message): + request_payload = type(request).to_json(request) + elif isinstance(request, google.protobuf.message.Message): + request_payload = MessageToJson(request) + else: + request_payload = f"{type(request).__name__}: {pickle.dumps(request)!r}" + + request_metadata = { + key: value.decode("utf-8") if 
isinstance(value, bytes) else value + for key, value in request_metadata + } + grpc_request = { + "payload": request_payload, + "requestMethod": "grpc", + "metadata": dict(request_metadata), + } + _LOGGER.debug( + f"Sending request for {client_call_details.method}", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": str(client_call_details.method), + "request": grpc_request, + "metadata": grpc_request["metadata"], + }, + ) + response = await continuation(client_call_details, request) + if logging_enabled: # pragma: NO COVER + response_metadata = await response.trailing_metadata() + # Convert gRPC metadata `` to list of tuples + metadata = ( + dict([(k, str(v)) for k, v in response_metadata]) + if response_metadata + else None + ) + result = await response + if isinstance(result, proto.Message): + response_payload = type(result).to_json(result) + elif isinstance(result, google.protobuf.message.Message): + response_payload = MessageToJson(result) + else: + response_payload = f"{type(result).__name__}: {pickle.dumps(result)!r}" + grpc_response = { + "payload": response_payload, + "metadata": metadata, + "status": "OK", + } + _LOGGER.debug( + f"Received response to rpc {client_call_details.method}.", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": str(client_call_details.method), + "response": grpc_response, + "metadata": grpc_response["metadata"], + }, + ) + return response + + +class AccessPoliciesGrpcAsyncIOTransport(AccessPoliciesTransport): + """gRPC AsyncIO backend transport for AccessPolicies. + + Manages Identity and Access Management (IAM) access policies. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel( + cls, + host: str = "iam.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs, + ) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. This argument will be + removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs, + ) + + def __init__( + self, + *, + host: str = "iam.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[Union[aio.Channel, Callable[..., aio.Channel]]] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'iam.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if a ``channel`` instance is provided. + credentials_file (Optional[str]): Deprecated. A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if a ``channel`` instance is provided. + This argument will be removed in the next major version of this library. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ channel (Optional[Union[aio.Channel, Callable[..., aio.Channel]]]): + A ``Channel`` instance through which to make calls, or a Callable + that constructs and returns one. If set to None, ``self.create_channel`` + is used to create the channel. If a Callable is given, it will be called + with the same arguments as used in ``self.create_channel``. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if a ``channel`` instance is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if a ``channel`` instance or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. 
+ If not set, the host value will be used as a default. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + self._operations_client: Optional[operations_v1.OperationsAsyncClient] = None + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if isinstance(channel, aio.Channel): + # Ignore credentials if a channel was passed. + credentials = None + self._ignore_credentials = True + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + # initialize with the provided callable or the default channel + channel_init = channel or type(self).create_channel + self._grpc_channel = channel_init( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + self._interceptor = _LoggingClientAIOInterceptor() + self._grpc_channel._unary_unary_interceptors.append(self._interceptor) + self._logged_channel = self._grpc_channel + self._wrap_with_kind = ( + "kind" in inspect.signature(gapic_v1.method_async.wrap_method).parameters + ) + # Wrap messages. This must be done after self._logged_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. 
+ + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def operations_client(self) -> operations_v1.OperationsAsyncClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Quick check: Only create a new client if we do not already have one. + if self._operations_client is None: + self._operations_client = operations_v1.OperationsAsyncClient( + self._logged_channel + ) + + # Return the client from cache. + return self._operations_client + + @property + def create_access_policy( + self, + ) -> Callable[ + [access_policies_service.CreateAccessPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the create access policy method over gRPC. + + Creates an access policy, and returns a long running + operation. + + Returns: + Callable[[~.CreateAccessPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "create_access_policy" not in self._stubs: + self._stubs["create_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/CreateAccessPolicy", + request_serializer=access_policies_service.CreateAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["create_access_policy"] + + @property + def get_access_policy( + self, + ) -> Callable[ + [access_policies_service.GetAccessPolicyRequest], + Awaitable[access_policy_resources.AccessPolicy], + ]: + r"""Return a callable for the get access policy method over gRPC. + + Gets an access policy. 
+ + Returns: + Callable[[~.GetAccessPolicyRequest], + Awaitable[~.AccessPolicy]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_access_policy" not in self._stubs: + self._stubs["get_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/GetAccessPolicy", + request_serializer=access_policies_service.GetAccessPolicyRequest.serialize, + response_deserializer=access_policy_resources.AccessPolicy.deserialize, + ) + return self._stubs["get_access_policy"] + + @property + def update_access_policy( + self, + ) -> Callable[ + [access_policies_service.UpdateAccessPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the update access policy method over gRPC. + + Updates an access policy. + + Returns: + Callable[[~.UpdateAccessPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "update_access_policy" not in self._stubs: + self._stubs["update_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/UpdateAccessPolicy", + request_serializer=access_policies_service.UpdateAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["update_access_policy"] + + @property + def delete_access_policy( + self, + ) -> Callable[ + [access_policies_service.DeleteAccessPolicyRequest], + Awaitable[operations_pb2.Operation], + ]: + r"""Return a callable for the delete access policy method over gRPC. + + Deletes an access policy. 
+ + Returns: + Callable[[~.DeleteAccessPolicyRequest], + Awaitable[~.Operation]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "delete_access_policy" not in self._stubs: + self._stubs["delete_access_policy"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/DeleteAccessPolicy", + request_serializer=access_policies_service.DeleteAccessPolicyRequest.serialize, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["delete_access_policy"] + + @property + def list_access_policies( + self, + ) -> Callable[ + [access_policies_service.ListAccessPoliciesRequest], + Awaitable[access_policies_service.ListAccessPoliciesResponse], + ]: + r"""Return a callable for the list access policies method over gRPC. + + Lists access policies. + + Returns: + Callable[[~.ListAccessPoliciesRequest], + Awaitable[~.ListAccessPoliciesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "list_access_policies" not in self._stubs: + self._stubs["list_access_policies"] = self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/ListAccessPolicies", + request_serializer=access_policies_service.ListAccessPoliciesRequest.serialize, + response_deserializer=access_policies_service.ListAccessPoliciesResponse.deserialize, + ) + return self._stubs["list_access_policies"] + + @property + def search_access_policy_bindings( + self, + ) -> Callable[ + [access_policies_service.SearchAccessPolicyBindingsRequest], + Awaitable[access_policies_service.SearchAccessPolicyBindingsResponse], + ]: + r"""Return a callable for the search access policy bindings method over gRPC. + + Returns all policy bindings that bind a specific + policy if a user has searchPolicyBindings permission on + that policy. + + Returns: + Callable[[~.SearchAccessPolicyBindingsRequest], + Awaitable[~.SearchAccessPolicyBindingsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "search_access_policy_bindings" not in self._stubs: + self._stubs["search_access_policy_bindings"] = ( + self._logged_channel.unary_unary( + "/google.iam.v3beta.AccessPolicies/SearchAccessPolicyBindings", + request_serializer=access_policies_service.SearchAccessPolicyBindingsRequest.serialize, + response_deserializer=access_policies_service.SearchAccessPolicyBindingsResponse.deserialize, + ) + ) + return self._stubs["search_access_policy_bindings"] + + def _prep_wrapped_messages(self, client_info): + """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" + self._wrapped_methods = { + self.create_access_policy: self._wrap_method( + self.create_access_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.get_access_policy: self._wrap_method( + self.get_access_policy, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.update_access_policy: self._wrap_method( + self.update_access_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.delete_access_policy: self._wrap_method( + self.delete_access_policy, + default_timeout=30.0, + client_info=client_info, + ), + self.list_access_policies: self._wrap_method( + self.list_access_policies, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + client_info=client_info, + ), + self.search_access_policy_bindings: self._wrap_method( + self.search_access_policy_bindings, + default_retry=retries.AsyncRetry( + initial=1.0, + maximum=10.0, + multiplier=1.3, + predicate=retries.if_exception_type( + core_exceptions.ServiceUnavailable, + ), + deadline=60.0, + ), + default_timeout=60.0, + 
client_info=client_info, + ), + self.get_operation: self._wrap_method( + self.get_operation, + default_timeout=None, + client_info=client_info, + ), + } + + def _wrap_method(self, func, *args, **kwargs): + if self._wrap_with_kind: # pragma: NO COVER + kwargs["kind"] = self.kind + return gapic_v1.method_async.wrap_method(func, *args, **kwargs) + + def close(self): + return self._logged_channel.close() + + @property + def kind(self) -> str: + return "grpc_asyncio" + + @property + def get_operation( + self, + ) -> Callable[[operations_pb2.GetOperationRequest], operations_pb2.Operation]: + r"""Return a callable for the get_operation method over gRPC.""" + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "get_operation" not in self._stubs: + self._stubs["get_operation"] = self._logged_channel.unary_unary( + "/google.longrunning.Operations/GetOperation", + request_serializer=operations_pb2.GetOperationRequest.SerializeToString, + response_deserializer=operations_pb2.Operation.FromString, + ) + return self._stubs["get_operation"] + + +__all__ = ("AccessPoliciesGrpcAsyncIOTransport",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest.py new file mode 100644 index 000000000000..f429dc13564c --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest.py @@ -0,0 +1,1716 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import dataclasses +import json # type: ignore +import logging +import warnings +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +import google.protobuf +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1, operations_v1, rest_helpers, rest_streaming +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.requests import AuthorizedSession # type: ignore +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format +from requests import __version__ as requests_version + +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, +) + +from .base import DEFAULT_CLIENT_INFO as BASE_DEFAULT_CLIENT_INFO +from .rest_base import _BaseAccessPoliciesRestTransport + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault, None] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object, None] # type: ignore + +try: + from google.api_core import client_logging # type: ignore + + CLIENT_LOGGING_SUPPORTED = True # pragma: NO COVER +except ImportError: # pragma: NO COVER + CLIENT_LOGGING_SUPPORTED = False + +_LOGGER = logging.getLogger(__name__) + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=BASE_DEFAULT_CLIENT_INFO.gapic_version, + grpc_version=None, + 
rest_version=f"requests@{requests_version}", +) + +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER + DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ + + +class AccessPoliciesRestInterceptor: + """Interceptor for AccessPolicies. + + Interceptors are used to manipulate requests, request metadata, and responses + in arbitrary ways. + Example use cases include: + * Logging + * Verifying requests according to service or custom semantics + * Stripping extraneous information from responses + + These use cases and more can be enabled by injecting an + instance of a custom subclass when constructing the AccessPoliciesRestTransport. + + .. code-block:: python + class MyCustomAccessPoliciesInterceptor(AccessPoliciesRestInterceptor): + def pre_create_access_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_create_access_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_delete_access_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_delete_access_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_get_access_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_get_access_policy(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_list_access_policies(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_list_access_policies(self, response): + logging.log(f"Received response: {response}") + return response + + def pre_search_access_policy_bindings(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_search_access_policy_bindings(self, response): + 
logging.log(f"Received response: {response}") + return response + + def pre_update_access_policy(self, request, metadata): + logging.log(f"Received request: {request}") + return request, metadata + + def post_update_access_policy(self, response): + logging.log(f"Received response: {response}") + return response + + transport = AccessPoliciesRestTransport(interceptor=MyCustomAccessPoliciesInterceptor()) + client = AccessPoliciesClient(transport=transport) + + + """ + + def pre_create_access_policy( + self, + request: access_policies_service.CreateAccessPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.CreateAccessPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for create_access_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. + """ + return request, metadata + + def post_create_access_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for create_access_policy + + DEPRECATED. Please use the `post_create_access_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_create_access_policy` interceptor runs + before the `post_create_access_policy_with_metadata` interceptor. + """ + return response + + def post_create_access_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for create_access_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_create_access_policy_with_metadata` + interceptor in new development instead of the `post_create_access_policy` interceptor. + When both interceptors are used, this `post_create_access_policy_with_metadata` interceptor runs after the + `post_create_access_policy` interceptor. The (possibly modified) response returned by + `post_create_access_policy` will be passed to + `post_create_access_policy_with_metadata`. + """ + return response, metadata + + def pre_delete_access_policy( + self, + request: access_policies_service.DeleteAccessPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.DeleteAccessPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for delete_access_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. + """ + return request, metadata + + def post_delete_access_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for delete_access_policy + + DEPRECATED. Please use the `post_delete_access_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_delete_access_policy` interceptor runs + before the `post_delete_access_policy_with_metadata` interceptor. + """ + return response + + def post_delete_access_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for delete_access_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_delete_access_policy_with_metadata` + interceptor in new development instead of the `post_delete_access_policy` interceptor. + When both interceptors are used, this `post_delete_access_policy_with_metadata` interceptor runs after the + `post_delete_access_policy` interceptor. The (possibly modified) response returned by + `post_delete_access_policy` will be passed to + `post_delete_access_policy_with_metadata`. + """ + return response, metadata + + def pre_get_access_policy( + self, + request: access_policies_service.GetAccessPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.GetAccessPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for get_access_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. + """ + return request, metadata + + def post_get_access_policy( + self, response: access_policy_resources.AccessPolicy + ) -> access_policy_resources.AccessPolicy: + """Post-rpc interceptor for get_access_policy + + DEPRECATED. Please use the `post_get_access_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_get_access_policy` interceptor runs + before the `post_get_access_policy_with_metadata` interceptor. + """ + return response + + def post_get_access_policy_with_metadata( + self, + response: access_policy_resources.AccessPolicy, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policy_resources.AccessPolicy, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Post-rpc interceptor for get_access_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. 
+ + We recommend only using this `post_get_access_policy_with_metadata` + interceptor in new development instead of the `post_get_access_policy` interceptor. + When both interceptors are used, this `post_get_access_policy_with_metadata` interceptor runs after the + `post_get_access_policy` interceptor. The (possibly modified) response returned by + `post_get_access_policy` will be passed to + `post_get_access_policy_with_metadata`. + """ + return response, metadata + + def pre_list_access_policies( + self, + request: access_policies_service.ListAccessPoliciesRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.ListAccessPoliciesRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for list_access_policies + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. + """ + return request, metadata + + def post_list_access_policies( + self, response: access_policies_service.ListAccessPoliciesResponse + ) -> access_policies_service.ListAccessPoliciesResponse: + """Post-rpc interceptor for list_access_policies + + DEPRECATED. Please use the `post_list_access_policies_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_list_access_policies` interceptor runs + before the `post_list_access_policies_with_metadata` interceptor. 
+ """ + return response + + def post_list_access_policies_with_metadata( + self, + response: access_policies_service.ListAccessPoliciesResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.ListAccessPoliciesResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for list_access_policies + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. + + We recommend only using this `post_list_access_policies_with_metadata` + interceptor in new development instead of the `post_list_access_policies` interceptor. + When both interceptors are used, this `post_list_access_policies_with_metadata` interceptor runs after the + `post_list_access_policies` interceptor. The (possibly modified) response returned by + `post_list_access_policies` will be passed to + `post_list_access_policies_with_metadata`. + """ + return response, metadata + + def pre_search_access_policy_bindings( + self, + request: access_policies_service.SearchAccessPolicyBindingsRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.SearchAccessPolicyBindingsRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for search_access_policy_bindings + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. + """ + return request, metadata + + def post_search_access_policy_bindings( + self, response: access_policies_service.SearchAccessPolicyBindingsResponse + ) -> access_policies_service.SearchAccessPolicyBindingsResponse: + """Post-rpc interceptor for search_access_policy_bindings + + DEPRECATED. Please use the `post_search_access_policy_bindings_with_metadata` + interceptor instead. 
+ + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_search_access_policy_bindings` interceptor runs + before the `post_search_access_policy_bindings_with_metadata` interceptor. + """ + return response + + def post_search_access_policy_bindings_with_metadata( + self, + response: access_policies_service.SearchAccessPolicyBindingsResponse, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.SearchAccessPolicyBindingsResponse, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Post-rpc interceptor for search_access_policy_bindings + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. + + We recommend only using this `post_search_access_policy_bindings_with_metadata` + interceptor in new development instead of the `post_search_access_policy_bindings` interceptor. + When both interceptors are used, this `post_search_access_policy_bindings_with_metadata` interceptor runs after the + `post_search_access_policy_bindings` interceptor. The (possibly modified) response returned by + `post_search_access_policy_bindings` will be passed to + `post_search_access_policy_bindings_with_metadata`. + """ + return response, metadata + + def pre_update_access_policy( + self, + request: access_policies_service.UpdateAccessPolicyRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + access_policies_service.UpdateAccessPolicyRequest, + Sequence[Tuple[str, Union[str, bytes]]], + ]: + """Pre-rpc interceptor for update_access_policy + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. 
+ """ + return request, metadata + + def post_update_access_policy( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for update_access_policy + + DEPRECATED. Please use the `post_update_access_policy_with_metadata` + interceptor instead. + + Override in a subclass to read or manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. This `post_update_access_policy` interceptor runs + before the `post_update_access_policy_with_metadata` interceptor. + """ + return response + + def post_update_access_policy_with_metadata( + self, + response: operations_pb2.Operation, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[operations_pb2.Operation, Sequence[Tuple[str, Union[str, bytes]]]]: + """Post-rpc interceptor for update_access_policy + + Override in a subclass to read or manipulate the response or metadata after it + is returned by the AccessPolicies server but before it is returned to user code. + + We recommend only using this `post_update_access_policy_with_metadata` + interceptor in new development instead of the `post_update_access_policy` interceptor. + When both interceptors are used, this `post_update_access_policy_with_metadata` interceptor runs after the + `post_update_access_policy` interceptor. The (possibly modified) response returned by + `post_update_access_policy` will be passed to + `post_update_access_policy_with_metadata`. + """ + return response, metadata + + def pre_get_operation( + self, + request: operations_pb2.GetOperationRequest, + metadata: Sequence[Tuple[str, Union[str, bytes]]], + ) -> Tuple[ + operations_pb2.GetOperationRequest, Sequence[Tuple[str, Union[str, bytes]]] + ]: + """Pre-rpc interceptor for get_operation + + Override in a subclass to manipulate the request or metadata + before they are sent to the AccessPolicies server. 
+ """ + return request, metadata + + def post_get_operation( + self, response: operations_pb2.Operation + ) -> operations_pb2.Operation: + """Post-rpc interceptor for get_operation + + Override in a subclass to manipulate the response + after it is returned by the AccessPolicies server but before + it is returned to user code. + """ + return response + + +@dataclasses.dataclass +class AccessPoliciesRestStub: + _session: AuthorizedSession + _host: str + _interceptor: AccessPoliciesRestInterceptor + + +class AccessPoliciesRestTransport(_BaseAccessPoliciesRestTransport): + """REST backend synchronous transport for AccessPolicies. + + Manages Identity and Access Management (IAM) access policies. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "iam.googleapis.com", + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + interceptor: Optional[AccessPoliciesRestInterceptor] = None, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to (default: 'iam.googleapis.com'). + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + + credentials_file (Optional[str]): Deprecated. 
A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. This argument will be + removed in the next major version of this library. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + client_cert_source_for_mtls (Callable[[], Tuple[bytes, bytes]]): Client + certificate to configure mutual TLS HTTP channel. It is ignored + if ``channel`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + interceptor (Optional[AccessPoliciesRestInterceptor]): Interceptor used + to manipulate requests, request metadata, and responses. + api_audience (Optional[str]): The intended audience for the API calls + to the service that will be set when using certain 3rd party + authentication flows. Audience is typically a resource identifier. + If not set, the host value will be used as a default. + """ + # Run the base constructor + # TODO(yon-mg): resolve other ctor params i.e. scopes, quota, etc. 
+ # TODO: When custom host (api_endpoint) is set, `scopes` must *also* be set on the + # credentials object + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + url_scheme=url_scheme, + api_audience=api_audience, + ) + self._session = AuthorizedSession( + self._credentials, default_host=self.DEFAULT_HOST + ) + self._operations_client: Optional[operations_v1.AbstractOperationsClient] = None + if client_cert_source_for_mtls: + self._session.configure_mtls_channel(client_cert_source_for_mtls) + self._interceptor = interceptor or AccessPoliciesRestInterceptor() + self._prep_wrapped_messages(client_info) + + @property + def operations_client(self) -> operations_v1.AbstractOperationsClient: + """Create the client designed to process long-running operations. + + This property caches on the instance; repeated calls return the same + client. + """ + # Only create a new client if we do not already have one. + if self._operations_client is None: + http_options: Dict[str, List[Dict[str, str]]] = { + "google.longrunning.Operations.GetOperation": [ + { + "method": "get", + "uri": "/v3beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=folders/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=organizations/*/locations/*/operations/*}", + }, + ], + } + + rest_transport = operations_v1.OperationsRestTransport( + host=self._host, + # use the credentials which are saved + credentials=self._credentials, + scopes=self._scopes, + http_options=http_options, + path_prefix="v3beta", + ) + + self._operations_client = operations_v1.AbstractOperationsClient( + transport=rest_transport + ) + + # Return the client from cache. 
+ return self._operations_client + + class _CreateAccessPolicy( + _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.CreateAccessPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return response + + def __call__( + self, + request: access_policies_service.CreateAccessPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the create access policy method over HTTP. + + Args: + request (~.access_policies_service.CreateAccessPolicyRequest): + The request object. Request message for + CreateAccessPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy._get_http_options() + + request, metadata = self._interceptor.pre_create_access_policy( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.CreateAccessPolicy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "CreateAccessPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._CreateAccessPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_create_access_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_create_access_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.create_access_policy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "CreateAccessPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _DeleteAccessPolicy( + _BaseAccessPoliciesRestTransport._BaseDeleteAccessPolicy, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.DeleteAccessPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: access_policies_service.DeleteAccessPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, 
bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the delete access policy method over HTTP. + + Args: + request (~.access_policies_service.DeleteAccessPolicyRequest): + The request object. Request message for + DeleteAccessPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. + + """ + + http_options = _BaseAccessPoliciesRestTransport._BaseDeleteAccessPolicy._get_http_options() + + request, metadata = self._interceptor.pre_delete_access_policy( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseDeleteAccessPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseDeleteAccessPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.DeleteAccessPolicy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "DeleteAccessPolicy", + "httpRequest": http_request, + 
"metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._DeleteAccessPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_delete_access_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_delete_access_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.delete_access_policy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "DeleteAccessPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _GetAccessPolicy( + _BaseAccessPoliciesRestTransport._BaseGetAccessPolicy, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.GetAccessPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + 
timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: access_policies_service.GetAccessPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> access_policy_resources.AccessPolicy: + r"""Call the get access policy method over HTTP. + + Args: + request (~.access_policies_service.GetAccessPolicyRequest): + The request object. Request message for GetAccessPolicy + method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.access_policy_resources.AccessPolicy: + An IAM access policy resource. 
+ """ + + http_options = _BaseAccessPoliciesRestTransport._BaseGetAccessPolicy._get_http_options() + + request, metadata = self._interceptor.pre_get_access_policy( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseGetAccessPolicy._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseGetAccessPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.GetAccessPolicy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "GetAccessPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._GetAccessPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = access_policy_resources.AccessPolicy() + pb_resp = access_policy_resources.AccessPolicy.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_get_access_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_get_access_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = access_policy_resources.AccessPolicy.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.get_access_policy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "GetAccessPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _ListAccessPolicies( + _BaseAccessPoliciesRestTransport._BaseListAccessPolicies, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.ListAccessPolicies") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: access_policies_service.ListAccessPoliciesRequest, + *, + retry: OptionalRetry = 
gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> access_policies_service.ListAccessPoliciesResponse: + r"""Call the list access policies method over HTTP. + + Args: + request (~.access_policies_service.ListAccessPoliciesRequest): + The request object. Request message for + ListAccessPolicies method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.access_policies_service.ListAccessPoliciesResponse: + Response message for + ListAccessPolicies method. + + """ + + http_options = _BaseAccessPoliciesRestTransport._BaseListAccessPolicies._get_http_options() + + request, metadata = self._interceptor.pre_list_access_policies( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseListAccessPolicies._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseListAccessPolicies._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.ListAccessPolicies", + extra={ + "serviceName": 
"google.iam.v3beta.AccessPolicies", + "rpcName": "ListAccessPolicies", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._ListAccessPolicies._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. + if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = access_policies_service.ListAccessPoliciesResponse() + pb_resp = access_policies_service.ListAccessPoliciesResponse.pb(resp) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_list_access_policies(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_list_access_policies_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = ( + access_policies_service.ListAccessPoliciesResponse.to_json( + response + ) + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.list_access_policies", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "ListAccessPolicies", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _SearchAccessPolicyBindings( + _BaseAccessPoliciesRestTransport._BaseSearchAccessPolicyBindings, + AccessPoliciesRestStub, + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.SearchAccessPolicyBindings") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + 
timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: access_policies_service.SearchAccessPolicyBindingsRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> access_policies_service.SearchAccessPolicyBindingsResponse: + r"""Call the search access policy + bindings method over HTTP. + + Args: + request (~.access_policies_service.SearchAccessPolicyBindingsRequest): + The request object. Request message for + SearchAccessPolicyBindings rpc. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.access_policies_service.SearchAccessPolicyBindingsResponse: + Response message for + SearchAccessPolicyBindings rpc. 
+ + """ + + http_options = _BaseAccessPoliciesRestTransport._BaseSearchAccessPolicyBindings._get_http_options() + + request, metadata = self._interceptor.pre_search_access_policy_bindings( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseSearchAccessPolicyBindings._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseSearchAccessPolicyBindings._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.SearchAccessPolicyBindings", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "SearchAccessPolicyBindings", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = ( + AccessPoliciesRestTransport._SearchAccessPolicyBindings._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = access_policies_service.SearchAccessPolicyBindingsResponse() + pb_resp = access_policies_service.SearchAccessPolicyBindingsResponse.pb( + resp + ) + + json_format.Parse(response.content, pb_resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_search_access_policy_bindings(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = ( + self._interceptor.post_search_access_policy_bindings_with_metadata( + resp, response_metadata + ) + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = access_policies_service.SearchAccessPolicyBindingsResponse.to_json( + response + ) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.search_access_policy_bindings", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "SearchAccessPolicyBindings", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + class _UpdateAccessPolicy( + _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.UpdateAccessPolicy") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + data=body, + ) + return 
response + + def __call__( + self, + request: access_policies_service.UpdateAccessPolicyRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the update access policy method over HTTP. + + Args: + request (~.access_policies_service.UpdateAccessPolicyRequest): + The request object. Request message for + UpdateAccessPolicy method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + ~.operations_pb2.Operation: + This resource represents a + long-running operation that is the + result of a network API call. 
+ + """ + + http_options = _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy._get_http_options() + + request, metadata = self._interceptor.pre_update_access_policy( + request, metadata + ) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy._get_transcoded_request( + http_options, request + ) + + body = _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy._get_request_body_json( + transcoded_request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = type(request).to_json(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.UpdateAccessPolicy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "UpdateAccessPolicy", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._UpdateAccessPolicy._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + body, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + # Return the response + resp = operations_pb2.Operation() + json_format.Parse(response.content, resp, ignore_unknown_fields=True) + + resp = self._interceptor.post_update_access_policy(resp) + response_metadata = [(k, str(v)) for k, v in response.headers.items()] + resp, _ = self._interceptor.post_update_access_policy_with_metadata( + resp, response_metadata + ) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesClient.update_access_policy", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "UpdateAccessPolicy", + "metadata": http_response["headers"], + "httpResponse": http_response, + }, + ) + return resp + + @property + def create_access_policy( + self, + ) -> Callable[ + [access_policies_service.CreateAccessPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._CreateAccessPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def delete_access_policy( + self, + ) -> Callable[ + [access_policies_service.DeleteAccessPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._DeleteAccessPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_access_policy( + self, + ) -> Callable[ + [access_policies_service.GetAccessPolicyRequest], + access_policy_resources.AccessPolicy, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._GetAccessPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def list_access_policies( + self, + ) -> Callable[ + [access_policies_service.ListAccessPoliciesRequest], + access_policies_service.ListAccessPoliciesResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._ListAccessPolicies(self._session, self._host, self._interceptor) # type: ignore + + @property + def search_access_policy_bindings( + self, + ) -> Callable[ + [access_policies_service.SearchAccessPolicyBindingsRequest], + access_policies_service.SearchAccessPolicyBindingsResponse, + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._SearchAccessPolicyBindings( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def update_access_policy( + self, + ) -> Callable[ + [access_policies_service.UpdateAccessPolicyRequest], operations_pb2.Operation + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._UpdateAccessPolicy(self._session, self._host, self._interceptor) # type: ignore + + @property + def get_operation(self): + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + + class _GetOperation( + _BaseAccessPoliciesRestTransport._BaseGetOperation, AccessPoliciesRestStub + ): + def __hash__(self): + return hash("AccessPoliciesRestTransport.GetOperation") + + @staticmethod + def _get_response( + host, + metadata, + query_params, + session, + timeout, + transcoded_request, + body=None, + ): + uri = transcoded_request["uri"] + method = transcoded_request["method"] + headers = dict(metadata) + headers["Content-Type"] = "application/json" + response = getattr(session, method)( + "{host}{uri}".format(host=host, uri=uri), + timeout=timeout, + headers=headers, + params=rest_helpers.flatten_query_params(query_params, strict=True), + ) + return response + + def __call__( + self, + request: operations_pb2.GetOperationRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> operations_pb2.Operation: + r"""Call the get operation method over HTTP. + + Args: + request (operations_pb2.GetOperationRequest): + The request object for GetOperation method. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + operations_pb2.Operation: Response from GetOperation method. 
+ """ + + http_options = ( + _BaseAccessPoliciesRestTransport._BaseGetOperation._get_http_options() + ) + + request, metadata = self._interceptor.pre_get_operation(request, metadata) + transcoded_request = _BaseAccessPoliciesRestTransport._BaseGetOperation._get_transcoded_request( + http_options, request + ) + + # Jsonify the query params + query_params = _BaseAccessPoliciesRestTransport._BaseGetOperation._get_query_params_json( + transcoded_request + ) + + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + request_url = "{host}{uri}".format( + host=self._host, uri=transcoded_request["uri"] + ) + method = transcoded_request["method"] + try: + request_payload = json_format.MessageToJson(request) + except: + request_payload = None + http_request = { + "payload": request_payload, + "requestMethod": method, + "requestUrl": request_url, + "headers": dict(metadata), + } + _LOGGER.debug( + f"Sending request for google.iam_v3beta.AccessPoliciesClient.GetOperation", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "GetOperation", + "httpRequest": http_request, + "metadata": http_request["headers"], + }, + ) + + # Send the request + response = AccessPoliciesRestTransport._GetOperation._get_response( + self._host, + metadata, + query_params, + self._session, + timeout, + transcoded_request, + ) + + # In case of error, raise the appropriate core_exceptions.GoogleAPICallError exception + # subclass. 
+ if response.status_code >= 400: + raise core_exceptions.from_http_response(response) + + content = response.content.decode("utf-8") + resp = operations_pb2.Operation() + resp = json_format.Parse(content, resp) + resp = self._interceptor.post_get_operation(resp) + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor( + logging.DEBUG + ): # pragma: NO COVER + try: + response_payload = json_format.MessageToJson(resp) + except: + response_payload = None + http_response = { + "payload": response_payload, + "headers": dict(response.headers), + "status": response.status_code, + } + _LOGGER.debug( + "Received response for google.iam_v3beta.AccessPoliciesAsyncClient.GetOperation", + extra={ + "serviceName": "google.iam.v3beta.AccessPolicies", + "rpcName": "GetOperation", + "httpResponse": http_response, + "metadata": http_response["headers"], + }, + ) + return resp + + @property + def kind(self) -> str: + return "rest" + + def close(self): + self._session.close() + + +__all__ = ("AccessPoliciesRestTransport",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest_base.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest_base.py new file mode 100644 index 000000000000..28db819c8484 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/access_policies/transports/rest_base.py @@ -0,0 +1,487 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +import json # type: ignore +import re +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1, path_template +from google.cloud.location import locations_pb2 # type: ignore +from google.longrunning import operations_pb2 # type: ignore +from google.protobuf import json_format + +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, +) + +from .base import DEFAULT_CLIENT_INFO, AccessPoliciesTransport + + +class _BaseAccessPoliciesRestTransport(AccessPoliciesTransport): + """Base REST backend transport for AccessPolicies. + + Note: This class is not meant to be used directly. Use its sync and + async sub-classes instead. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends JSON representations of protocol buffers over HTTP/1.1 + """ + + def __init__( + self, + *, + host: str = "iam.googleapis.com", + credentials: Optional[Any] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + url_scheme: str = "https", + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + Args: + host (Optional[str]): + The hostname to connect to (default: 'iam.googleapis.com'). + credentials (Optional[Any]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. 
+ Generally, you only need to set this if you are developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + url_scheme: the protocol scheme for the API endpoint. Normally + "https", but for testing or local servers, + "http" can be specified. + """ + # Run the base constructor + maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) + if maybe_url_match is None: + raise ValueError( + f"Unexpected hostname structure: {host}" + ) # pragma: NO COVER + + url_match_items = maybe_url_match.groupdict() + + host = f"{url_scheme}://{host}" if not url_match_items["scheme"] else host + + super().__init__( + host=host, + credentials=credentials, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + class _BaseCreateAccessPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = { + "accessPolicyId": "", + } + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "post", + "uri": "/v3beta/{parent=projects/*/locations/*}/accessPolicies", + "body": "access_policy", + }, + { + "method": "post", + "uri": "/v3beta/{parent=folders/*/locations/*}/accessPolicies", + "body": "access_policy", + }, + { + "method": "post", + "uri": "/v3beta/{parent=organizations/*/locations/*}/accessPolicies", + "body": "access_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.CreateAccessPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + 
@staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseCreateAccessPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseDeleteAccessPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "delete", + "uri": "/v3beta/{name=projects/*/locations/*/accessPolicies/*}", + }, + { + "method": "delete", + "uri": "/v3beta/{name=folders/*/locations/*/accessPolicies/*}", + }, + { + "method": "delete", + "uri": "/v3beta/{name=organizations/*/locations/*/accessPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.DeleteAccessPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseDeleteAccessPolicy._get_unset_required_fields( + query_params + ) + ) + + 
query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetAccessPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3beta/{name=projects/*/locations/*/accessPolicies/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=folders/*/locations/*/accessPolicies/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=organizations/*/locations/*/accessPolicies/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.GetAccessPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseGetAccessPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseListAccessPolicies: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": 
"/v3beta/{parent=projects/*/locations/*}/accessPolicies", + }, + { + "method": "get", + "uri": "/v3beta/{parent=folders/*/locations/*}/accessPolicies", + }, + { + "method": "get", + "uri": "/v3beta/{parent=organizations/*/locations/*}/accessPolicies", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.ListAccessPoliciesRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseListAccessPolicies._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseSearchAccessPolicyBindings: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3beta/{name=organizations/*/locations/*/accessPolicies/*}:searchPolicyBindings", + }, + { + "method": "get", + "uri": "/v3beta/{name=folders/*/locations/*/accessPolicies/*}:searchPolicyBindings", + }, + { + "method": "get", + "uri": "/v3beta/{name=projects/*/locations/*/accessPolicies/*}:searchPolicyBindings", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.SearchAccessPolicyBindingsRequest.pb( + request + ) + transcoded_request = 
path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseSearchAccessPolicyBindings._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseUpdateAccessPolicy: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + __REQUIRED_FIELDS_DEFAULT_VALUES: Dict[str, Any] = {} + + @classmethod + def _get_unset_required_fields(cls, message_dict): + return { + k: v + for k, v in cls.__REQUIRED_FIELDS_DEFAULT_VALUES.items() + if k not in message_dict + } + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "patch", + "uri": "/v3beta/{access_policy.name=projects/*/locations/*/accessPolicies/*}", + "body": "access_policy", + }, + { + "method": "patch", + "uri": "/v3beta/{access_policy.name=folders/*/locations/*/accessPolicies/*}", + "body": "access_policy", + }, + { + "method": "patch", + "uri": "/v3beta/{access_policy.name=organizations/*/locations/*/accessPolicies/*}", + "body": "access_policy", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + pb_request = access_policies_service.UpdateAccessPolicyRequest.pb(request) + transcoded_request = path_template.transcode(http_options, pb_request) + return transcoded_request + + @staticmethod + def _get_request_body_json(transcoded_request): + # Jsonify the request body + + body = json_format.MessageToJson( + transcoded_request["body"], use_integers_for_enums=True + ) + return body + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads( + json_format.MessageToJson( + 
transcoded_request["query_params"], + use_integers_for_enums=True, + ) + ) + query_params.update( + _BaseAccessPoliciesRestTransport._BaseUpdateAccessPolicy._get_unset_required_fields( + query_params + ) + ) + + query_params["$alt"] = "json;enum-encoding=int" + return query_params + + class _BaseGetOperation: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + @staticmethod + def _get_http_options(): + http_options: List[Dict[str, str]] = [ + { + "method": "get", + "uri": "/v3beta/{name=projects/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=folders/*/locations/*/operations/*}", + }, + { + "method": "get", + "uri": "/v3beta/{name=organizations/*/locations/*/operations/*}", + }, + ] + return http_options + + @staticmethod + def _get_transcoded_request(http_options, request): + request_kwargs = json_format.MessageToDict(request) + transcoded_request = path_template.transcode(http_options, **request_kwargs) + return transcoded_request + + @staticmethod + def _get_query_params_json(transcoded_request): + query_params = json.loads(json.dumps(transcoded_request["query_params"])) + return query_params + + +__all__ = ("_BaseAccessPoliciesRestTransport",) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/async_client.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/async_client.py index 46f846bc892d..109c64cfbff6 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/async_client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/async_client.py @@ -325,7 +325,7 @@ async def create_policy_binding( ) -> operation_async.AsyncOperation: r"""Creates a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is created, the + the policy and target. 
After the binding is created, the policy is applied to the target. .. code-block:: python @@ -612,11 +612,8 @@ async def update_policy_binding( ) -> operation_async.AsyncOperation: r"""Updates a policy binding and returns a long-running operation. Callers will need the IAM permissions on the - policy and target in the binding to update, and the IAM - permission to remove the existing policy from the - binding. Target is immutable and cannot be updated. Once - the binding is updated, the new policy is applied to the - target. + policy and target in the binding to update. Target and + policy are immutable and cannot be updated. .. code-block:: python @@ -763,7 +760,7 @@ async def delete_policy_binding( ) -> operation_async.AsyncOperation: r"""Deletes a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is deleted, the + the policy and target. After the binding is deleted, the policy no longer applies to the target. .. code-block:: python diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/client.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/client.py index 0bbc1c8de412..2f3f69758a8a 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/client.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/client.py @@ -757,7 +757,7 @@ def create_policy_binding( ) -> operation.Operation: r"""Creates a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is created, the + the policy and target. After the binding is created, the policy is applied to the target. .. code-block:: python @@ -1038,11 +1038,8 @@ def update_policy_binding( ) -> operation.Operation: r"""Updates a policy binding and returns a long-running operation. 
Callers will need the IAM permissions on the - policy and target in the binding to update, and the IAM - permission to remove the existing policy from the - binding. Target is immutable and cannot be updated. Once - the binding is updated, the new policy is applied to the - target. + policy and target in the binding to update. Target and + policy are immutable and cannot be updated. .. code-block:: python @@ -1186,7 +1183,7 @@ def delete_policy_binding( ) -> operation.Operation: r"""Deletes a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is deleted, the + the policy and target. After the binding is deleted, the policy no longer applies to the target. .. code-block:: python diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc.py index 0a5cb039af84..2538802b0a37 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc.py @@ -358,7 +358,7 @@ def create_policy_binding( Creates a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is created, the + the policy and target. After the binding is created, the policy is applied to the target. Returns: @@ -418,11 +418,8 @@ def update_policy_binding( Updates a policy binding and returns a long-running operation. Callers will need the IAM permissions on the - policy and target in the binding to update, and the IAM - permission to remove the existing policy from the - binding. Target is immutable and cannot be updated. Once - the binding is updated, the new policy is applied to the - target. + policy and target in the binding to update. 
Target and + policy are immutable and cannot be updated. Returns: Callable[[~.UpdatePolicyBindingRequest], @@ -452,7 +449,7 @@ def delete_policy_binding( Deletes a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is deleted, the + the policy and target. After the binding is deleted, the policy no longer applies to the target. Returns: diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc_asyncio.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc_asyncio.py index 5598743b10ed..eaa48ba31908 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc_asyncio.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/services/policy_bindings/transports/grpc_asyncio.py @@ -367,7 +367,7 @@ def create_policy_binding( Creates a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. Once the binding is created, the + the policy and target. After the binding is created, the policy is applied to the target. Returns: @@ -428,11 +428,8 @@ def update_policy_binding( Updates a policy binding and returns a long-running operation. Callers will need the IAM permissions on the - policy and target in the binding to update, and the IAM - permission to remove the existing policy from the - binding. Target is immutable and cannot be updated. Once - the binding is updated, the new policy is applied to the - target. + policy and target in the binding to update. Target and + policy are immutable and cannot be updated. Returns: Callable[[~.UpdatePolicyBindingRequest], @@ -463,7 +460,7 @@ def delete_policy_binding( Deletes a policy binding and returns a long-running operation. Callers will need the IAM permissions on both - the policy and target. 
Once the binding is deleted, the + the policy and target. After the binding is deleted, the policy no longer applies to the target. Returns: diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/__init__.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/__init__.py index bb0caa3a5e8d..b71033797a12 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/__init__.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/__init__.py @@ -13,6 +13,21 @@ # See the License for the specific language governing permissions and # limitations under the License. # +from .access_policies_service import ( + CreateAccessPolicyRequest, + DeleteAccessPolicyRequest, + GetAccessPolicyRequest, + ListAccessPoliciesRequest, + ListAccessPoliciesResponse, + SearchAccessPolicyBindingsRequest, + SearchAccessPolicyBindingsResponse, + UpdateAccessPolicyRequest, +) +from .access_policy_resources import ( + AccessPolicy, + AccessPolicyDetails, + AccessPolicyRule, +) from .operation_metadata import ( OperationMetadata, ) @@ -46,6 +61,17 @@ ) __all__ = ( + "CreateAccessPolicyRequest", + "DeleteAccessPolicyRequest", + "GetAccessPolicyRequest", + "ListAccessPoliciesRequest", + "ListAccessPoliciesResponse", + "SearchAccessPolicyBindingsRequest", + "SearchAccessPolicyBindingsResponse", + "UpdateAccessPolicyRequest", + "AccessPolicy", + "AccessPolicyDetails", + "AccessPolicyRule", "OperationMetadata", "PolicyBinding", "CreatePolicyBindingRequest", diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policies_service.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policies_service.py new file mode 100644 index 000000000000..1db0ef4b5e70 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policies_service.py @@ -0,0 +1,327 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in 
compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.iam_v3beta.types import ( + access_policy_resources, + policy_binding_resources, +) + +__protobuf__ = proto.module( + package="google.iam.v3beta", + manifest={ + "CreateAccessPolicyRequest", + "GetAccessPolicyRequest", + "UpdateAccessPolicyRequest", + "DeleteAccessPolicyRequest", + "ListAccessPoliciesRequest", + "ListAccessPoliciesResponse", + "SearchAccessPolicyBindingsRequest", + "SearchAccessPolicyBindingsResponse", + }, +) + + +class CreateAccessPolicyRequest(proto.Message): + r"""Request message for CreateAccessPolicy method. + + Attributes: + parent (str): + Required. The parent resource where this access policy will + be created. + + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + access_policy_id (str): + Required. The ID to use for the access policy, which will + become the final component of the access policy's resource + name. + + This value must start with a lowercase letter followed by up + to 62 lowercase letters, numbers, hyphens, or dots. Pattern, + /[a-z][a-z0-9-.]{2,62}/. + + This value must be unique among all access policies with the + same parent. + access_policy (google.cloud.iam_v3beta.types.AccessPolicy): + Required. The access policy to create. + validate_only (bool): + Optional. 
If set, validate the request and + preview the creation, but do not actually post + it. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + access_policy_id: str = proto.Field( + proto.STRING, + number=2, + ) + access_policy: access_policy_resources.AccessPolicy = proto.Field( + proto.MESSAGE, + number=3, + message=access_policy_resources.AccessPolicy, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class GetAccessPolicyRequest(proto.Message): + r"""Request message for GetAccessPolicy method. + + Attributes: + name (str): + Required. The name of the access policy to retrieve. + + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class UpdateAccessPolicyRequest(proto.Message): + r"""Request message for UpdateAccessPolicy method. + + Attributes: + access_policy (google.cloud.iam_v3beta.types.AccessPolicy): + Required. The access policy to update. + + The access policy's ``name`` field is used to identify the + policy to update. + validate_only (bool): + Optional. If set, validate the request and + preview the update, but do not actually post it. + """ + + access_policy: access_policy_resources.AccessPolicy = proto.Field( + proto.MESSAGE, + number=1, + message=access_policy_resources.AccessPolicy, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class DeleteAccessPolicyRequest(proto.Message): + r"""Request message for DeleteAccessPolicy method. + + Attributes: + name (str): + Required. The name of the access policy to delete. 
+ + Format: + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + etag (str): + Optional. The etag of the access policy. If + this is provided, it must match the server's + etag. + validate_only (bool): + Optional. If set, validate the request and + preview the deletion, but do not actually post + it. + force (bool): + Optional. If set to true, the request will + force the deletion of the Policy even if the + Policy references PolicyBindings. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + etag: str = proto.Field( + proto.STRING, + number=2, + ) + validate_only: bool = proto.Field( + proto.BOOL, + number=3, + ) + force: bool = proto.Field( + proto.BOOL, + number=4, + ) + + +class ListAccessPoliciesRequest(proto.Message): + r"""Request message for ListAccessPolicies method. + + Attributes: + parent (str): + Required. The parent resource, which owns the collection of + access policy resources. + + Format: ``projects/{project_id}/locations/{location}`` + ``projects/{project_number}/locations/{location}`` + ``folders/{folder_id}/locations/{location}`` + ``organizations/{organization_id}/locations/{location}`` + page_size (int): + Optional. The maximum number of access + policies to return. The service may return fewer + than this value. + + If unspecified, at most 50 access policies will + be returned. Valid value ranges from 1 to 1000; + values above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``ListAccessPolicies`` call. Provide this to retrieve the + subsequent page. + + When paginating, all other parameters provided to + ``ListAccessPolicies`` must match the call that provided the + page token. 
+ """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListAccessPoliciesResponse(proto.Message): + r"""Response message for ListAccessPolicies method. + + Attributes: + access_policies (MutableSequence[google.cloud.iam_v3beta.types.AccessPolicy]): + The access policies from the specified + parent. + next_page_token (str): + Optional. A token, which can be sent as ``page_token`` to + retrieve the next page. If this field is omitted, there are + no subsequent pages. + """ + + @property + def raw_page(self): + return self + + access_policies: MutableSequence[access_policy_resources.AccessPolicy] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=access_policy_resources.AccessPolicy, + ) + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +class SearchAccessPolicyBindingsRequest(proto.Message): + r"""Request message for SearchAccessPolicyBindings rpc. + + Attributes: + name (str): + Required. The name of the access policy. Format: + ``organizations/{organization_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``folders/{folder_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_id}/locations/{location}/accessPolicies/{access_policy_id}`` + ``projects/{project_number}/locations/{location}/accessPolicies/{access_policy_id}`` + page_size (int): + Optional. The maximum number of policy + bindings to return. The service may return fewer + than this value. + + If unspecified, at most 50 policy bindings will + be returned. The maximum value is 1000; values + above 1000 will be coerced to 1000. + page_token (str): + Optional. A page token, received from a previous + ``SearchAccessPolicyBindingsRequest`` call. Provide this to + retrieve the subsequent page. 
+ + When paginating, all other parameters provided to + ``SearchAccessPolicyBindingsRequest`` must match the call + that provided the page token. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + + +class SearchAccessPolicyBindingsResponse(proto.Message): + r"""Response message for SearchAccessPolicyBindings rpc. + + Attributes: + policy_bindings (MutableSequence[google.cloud.iam_v3beta.types.PolicyBinding]): + The policy bindings that reference the + specified policy. + next_page_token (str): + Optional. A token, which can be sent as ``page_token`` to + retrieve the next page. If this field is omitted, there are + no subsequent pages. + """ + + @property + def raw_page(self): + return self + + policy_bindings: MutableSequence[policy_binding_resources.PolicyBinding] = ( + proto.RepeatedField( + proto.MESSAGE, + number=1, + message=policy_binding_resources.PolicyBinding, + ) + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policy_resources.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policy_resources.py new file mode 100644 index 000000000000..9cd8e3a20832 --- /dev/null +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/access_policy_resources.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from __future__ import annotations + +from typing import MutableMapping, MutableSequence + +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.type.expr_pb2 as expr_pb2 # type: ignore +import proto # type: ignore + +__protobuf__ = proto.module( + package="google.iam.v3beta", + manifest={ + "AccessPolicy", + "AccessPolicyDetails", + "AccessPolicyRule", + }, +) + + +class AccessPolicy(proto.Message): + r"""An IAM access policy resource. + + Attributes: + name (str): + Identifier. The resource name of the access policy. + + The following formats are supported: + + - ``projects/{project_id}/locations/{location}/accessPolicies/{policy_id}`` + - ``projects/{project_number}/locations/{location}/accessPolicies/{policy_id}`` + - ``folders/{folder_id}/locations/{location}/accessPolicies/{policy_id}`` + - ``organizations/{organization_id}/locations/{location}/accessPolicies/{policy_id}`` + uid (str): + Output only. The globally unique ID of the + access policy. + etag (str): + Optional. The etag for the access policy. + If this is provided on update, it must match the + server's etag. + display_name (str): + Optional. The description of the access + policy. Must be less than or equal to 63 + characters. + annotations (MutableMapping[str, str]): + Optional. User defined annotations. See + https://google.aip.dev/148#annotations for more + details such as format and size limitations + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the access policy + was created. 
+ update_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time when the access policy + was most recently updated. + details (google.cloud.iam_v3beta.types.AccessPolicyDetails): + Optional. The details for the access policy. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + uid: str = proto.Field( + proto.STRING, + number=2, + ) + etag: str = proto.Field( + proto.STRING, + number=3, + ) + display_name: str = proto.Field( + proto.STRING, + number=4, + ) + annotations: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=5, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=6, + message=timestamp_pb2.Timestamp, + ) + update_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=7, + message=timestamp_pb2.Timestamp, + ) + details: "AccessPolicyDetails" = proto.Field( + proto.MESSAGE, + number=8, + message="AccessPolicyDetails", + ) + + +class AccessPolicyDetails(proto.Message): + r"""Access policy details. + + Attributes: + rules (MutableSequence[google.cloud.iam_v3beta.types.AccessPolicyRule]): + Required. A list of access policy rules. + """ + + rules: MutableSequence["AccessPolicyRule"] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="AccessPolicyRule", + ) + + +class AccessPolicyRule(proto.Message): + r"""Access Policy Rule that determines the behavior of the + policy. + + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + description (str): + Optional. Customer specified description of + the rule. Must be less than or equal to 256 + characters. + + This field is a member of `oneof`_ ``_description``. + effect (google.cloud.iam_v3beta.types.AccessPolicyRule.Effect): + Required. The effect of the rule. + + This field is a member of `oneof`_ ``_effect``. + principals (MutableSequence[str]): + Required. 
The identities for which this rule's effect + governs using one or more permissions on Google Cloud + resources. This field can contain the following values: + + - ``principal://goog/subject/{email_id}``: A specific Google + Account. Includes Gmail, Cloud Identity, and Google + Workspace user accounts. For example, + ``principal://goog/subject/alice@example.com``. + + - ``principal://iam.googleapis.com/projects/-/serviceAccounts/{service_account_id}``: + A Google Cloud service account. For example, + ``principal://iam.googleapis.com/projects/-/serviceAccounts/my-service-account@iam.gserviceaccount.com``. + + - ``principalSet://goog/group/{group_id}``: A Google group. + For example, + ``principalSet://goog/group/admins@example.com``. + + - ``principalSet://goog/cloudIdentityCustomerId/{customer_id}``: + All of the principals associated with the specified Google + Workspace or Cloud Identity customer ID. For example, + ``principalSet://goog/cloudIdentityCustomerId/C01Abc35``. + + If an identifier that was previously set on a policy is soft + deleted, then calls to read that policy will return the + identifier with a deleted prefix. Users cannot set + identifiers with this syntax. + + - ``deleted:principal://goog/subject/{email_id}?uid={uid}``: + A specific Google Account that was deleted recently. For + example, + ``deleted:principal://goog/subject/alice@example.com?uid=1234567890``. + If the Google Account is recovered, this identifier + reverts to the standard identifier for a Google Account. + + - ``deleted:principalSet://goog/group/{group_id}?uid={uid}``: + A Google group that was deleted recently. For example, + ``deleted:principalSet://goog/group/admins@example.com?uid=1234567890``. + If the Google group is restored, this identifier reverts + to the standard identifier for a Google group. + + - ``deleted:principal://iam.googleapis.com/projects/-/serviceAccounts/{service_account_id}?uid={uid}``: + A Google Cloud service account that was deleted recently. 
+ For example, + ``deleted:principal://iam.googleapis.com/projects/-/serviceAccounts/my-service-account@iam.gserviceaccount.com?uid=1234567890``. + If the service account is undeleted, this identifier + reverts to the standard identifier for a service account. + excluded_principals (MutableSequence[str]): + Optional. The identities that are excluded from the access + policy rule, even if they are listed in the ``principals``. + For example, you could add a Google group to the + ``principals``, then exclude specific users who belong to + that group. + operation (google.cloud.iam_v3beta.types.AccessPolicyRule.Operation): + Required. Attributes that are used to + determine whether this rule applies to a + request. + conditions (MutableMapping[str, google.type.expr_pb2.Expr]): + Optional. The conditions that determine whether this rule + applies to a request. Conditions are identified by their + key, which is the FQDN of the service that they are relevant + to. For example: + + :: + + "conditions": { + "iam.googleapis.com": { + "expression": + } + } + + Each rule is evaluated independently. If this rule does not + apply to a request, other rules might still apply. Currently + supported keys are as follows: + + - ``eventarc.googleapis.com``: Can use ``CEL`` functions + that evaluate resource fields. + + - ``iam.googleapis.com``: Can use ``CEL`` functions that + evaluate `resource + tags `__ + and combine them using boolean and logical operators. + Other functions and operators are not supported. + """ + + class Effect(proto.Enum): + r"""An effect to describe the access relationship. + + Values: + EFFECT_UNSPECIFIED (0): + The effect is unspecified. + DENY (1): + The policy will deny access if it evaluates + to true. + ALLOW (2): + The policy will grant access if it evaluates + to true. + """ + + EFFECT_UNSPECIFIED = 0 + DENY = 1 + ALLOW = 2 + + class Operation(proto.Message): + r"""Attributes that are used to determine whether this rule + applies to a request. 
+ + Attributes: + permissions (MutableSequence[str]): + Optional. The permissions that are explicitly affected by + this rule. Each permission uses the format + ``{service_fqdn}/{resource}.{verb}``, where + ``{service_fqdn}`` is the fully qualified domain name for + the service. Currently supported permissions are as follows: + + - ``eventarc.googleapis.com/messageBuses.publish``. + excluded_permissions (MutableSequence[str]): + Optional. Specifies the permissions that this rule excludes + from the set of affected permissions given by + ``permissions``. If a permission appears in ``permissions`` + *and* in ``excluded_permissions`` then it will *not* be + subject to the policy effect. + + The excluded permissions can be specified using the same + syntax as ``permissions``. + """ + + permissions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=1, + ) + excluded_permissions: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + + description: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + effect: Effect = proto.Field( + proto.ENUM, + number=2, + optional=True, + enum=Effect, + ) + principals: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + excluded_principals: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + operation: Operation = proto.Field( + proto.MESSAGE, + number=10, + message=Operation, + ) + conditions: MutableMapping[str, expr_pb2.Expr] = proto.MapField( + proto.STRING, + proto.MESSAGE, + number=9, + message=expr_pb2.Expr, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_binding_resources.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_binding_resources.py index 364ebe83c9b4..05b4f0c48daf 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_binding_resources.py +++ 
b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_binding_resources.py @@ -62,9 +62,9 @@ class PolicyBinding(proto.Message): https://google.aip.dev/148#annotations for more details such as format and size limitations target (google.cloud.iam_v3beta.types.PolicyBinding.Target): - Required. Immutable. Target is the full - resource name of the resource to which the - policy will be bound. Immutable once set. + Required. Immutable. The full resource name + of the resource to which the policy will be + bound. Immutable once set. policy_kind (google.cloud.iam_v3beta.types.PolicyBinding.PolicyKind): Immutable. The kind of the policy to attach in this binding. This field must be one of the @@ -111,14 +111,16 @@ class PolicyBinding(proto.Message): - ``principal.type != `` - ``principal.type in []`` - Supported principal types are Workspace, Workforce Pool, - Workload Pool and Service Account. Allowed string must be - one of: + Supported principal types are workspace, workforce pool, + workload pool, service account, and Agent Identity. Allowed + string must be one of: - - iam.googleapis.com/WorkspaceIdentity - - iam.googleapis.com/WorkforcePoolIdentity - - iam.googleapis.com/WorkloadPoolIdentity - - iam.googleapis.com/ServiceAccount + - ``iam.googleapis.com/WorkspaceIdentity`` + - ``iam.googleapis.com/WorkforcePoolIdentity`` + - ``iam.googleapis.com/WorkloadPoolIdentity`` + - ``iam.googleapis.com/ServiceAccount`` + - ``iam.googleapis.com/AgentPoolIdentity`` (available in + Preview) create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time when the policy binding was created. @@ -128,35 +130,43 @@ class PolicyBinding(proto.Message): """ class PolicyKind(proto.Enum): - r"""Different policy kinds supported in this binding. + r"""The different policy kinds supported in this binding. 
Values: POLICY_KIND_UNSPECIFIED (0): Unspecified policy kind; Not a valid state PRINCIPAL_ACCESS_BOUNDARY (1): Principal access boundary policy kind + ACCESS (2): + Access policy kind. """ POLICY_KIND_UNSPECIFIED = 0 PRINCIPAL_ACCESS_BOUNDARY = 1 + ACCESS = 2 class Target(proto.Message): - r"""Target is the full resource name of the resource to which the - policy will be bound. Immutable once set. + r"""The full resource name of the resource to which the policy + will be bound. Immutable once set. + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields Attributes: principal_set (str): - Immutable. Full Resource Name used for principal access - boundary policy bindings. The principal set must be directly - parented by the policy binding's parent or same as the - parent if the target is a project/folder/organization. + Immutable. The full resource name that's used for principal + access boundary policy bindings. The principal set must be + directly parented by the policy binding's parent or same as + the parent if the target is a project, folder, or + organization. 
Examples: - - For binding's parented by an organization: + - For bindings parented by an organization: - Organization: ``//cloudresourcemanager.googleapis.com/organizations/ORGANIZATION_ID`` @@ -165,12 +175,12 @@ class Target(proto.Message): - Workspace Identity: ``//iam.googleapis.com/locations/global/workspace/WORKSPACE_ID`` - - For binding's parented by a folder: + - For bindings parented by a folder: - Folder: ``//cloudresourcemanager.googleapis.com/folders/FOLDER_ID`` - - For binding's parented by a project: + - For bindings parented by a project: - Project: @@ -180,6 +190,22 @@ class Target(proto.Message): - Workload Identity Pool: ``//iam.googleapis.com/projects/PROJECT_NUMBER/locations/LOCATION/workloadIdentityPools/WORKLOAD_POOL_ID`` + This field is a member of `oneof`_ ``target``. + resource (str): + Immutable. The full resource name that's used for access + policy bindings. + + Examples: + + - Organization: + ``//cloudresourcemanager.googleapis.com/organizations/ORGANIZATION_ID`` + - Folder: + ``//cloudresourcemanager.googleapis.com/folders/FOLDER_ID`` + - Project: + + - ``//cloudresourcemanager.googleapis.com/projects/PROJECT_NUMBER`` + - ``//cloudresourcemanager.googleapis.com/projects/PROJECT_ID`` + This field is a member of `oneof`_ ``target``. """ @@ -188,6 +214,11 @@ class Target(proto.Message): number=1, oneof="target", ) + resource: str = proto.Field( + proto.STRING, + number=2, + oneof="target", + ) name: str = proto.Field( proto.STRING, diff --git a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_bindings_service.py b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_bindings_service.py index 83de2fce724d..a11607677582 100644 --- a/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_bindings_service.py +++ b/packages/google-cloud-iam/google/cloud/iam_v3beta/types/policy_bindings_service.py @@ -197,9 +197,8 @@ class ListPolicyBindingsRequest(proto.Message): bindings to return. 
The service may return fewer than this value. - If unspecified, at most 50 policy bindings will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + The default value is 50. The maximum value is + 1000. page_token (str): Optional. A page token, received from a previous ``ListPolicyBindings`` call. Provide this to retrieve the @@ -211,7 +210,7 @@ class ListPolicyBindingsRequest(proto.Message): filter (str): Optional. An expression for filtering the results of the request. Filter rules are case insensitive. Some eligible - fields for filtering are: + fields for filtering are the following: - ``target`` - ``policy`` @@ -295,9 +294,8 @@ class SearchTargetPolicyBindingsRequest(proto.Message): bindings to return. The service may return fewer than this value. - If unspecified, at most 50 policy bindings will - be returned. The maximum value is 1000; values - above 1000 will be coerced to 1000. + The default value is 50. The maximum value is + 1000. page_token (str): Optional. A page token, received from a previous ``SearchTargetPolicyBindingsRequest`` call. Provide this to @@ -317,6 +315,18 @@ class SearchTargetPolicyBindingsRequest(proto.Message): - ``projects/{project_number}/locations/{location}`` - ``folders/{folder_id}/locations/{location}`` - ``organizations/{organization_id}/locations/{location}`` + filter (str): + Optional. Filtering currently only supports the kind of + policies to return, and must be in the format + "policy_kind={policy_kind}". + + If String is empty, bindings bound to all kinds of policies + would be returned. + + The only supported values are the following: + + - "policy_kind=PRINCIPAL_ACCESS_BOUNDARY", + - "policy_kind=ACCESS". 
""" target: str = proto.Field( @@ -335,6 +345,10 @@ class SearchTargetPolicyBindingsRequest(proto.Message): proto.STRING, number=5, ) + filter: str = proto.Field( + proto.STRING, + number=6, + ) class SearchTargetPolicyBindingsResponse(proto.Message): diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_async.py new file mode 100644 index 000000000000..4bf641fe2906 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_async.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_create_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + # Make the request + operation = client.create_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_sync.py new file mode 100644 index 000000000000..9be838d0c9c4 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_create_access_policy_sync.py @@ -0,0 +1,58 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_create_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + # Make the request + operation = client.create_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_async.py new file mode 100644 index 000000000000..6490c0c9a5f1 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_async.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with 
the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_delete_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.DeleteAccessPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_sync.py new file mode 100644 index 000000000000..8c02810634e3 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_delete_access_policy_sync.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_delete_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.DeleteAccessPolicyRequest( + name="name_value", + ) + + # Make the request + operation = client.delete_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_async.py new file mode 100644 index 000000000000..b854b8a0e2bd --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_GetAccessPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_get_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.GetAccessPolicyRequest( + name="name_value", + ) + + # Make the request + response = await client.get_access_policy(request=request) + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_GetAccessPolicy_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_sync.py new file mode 100644 index 000000000000..7904dc68fe0d --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_get_access_policy_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_GetAccessPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_get_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.GetAccessPolicyRequest( + name="name_value", + ) + + # Make the request + response = client.get_access_policy(request=request) + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_GetAccessPolicy_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_async.py new file mode 100644 index 000000000000..16ef5bc8ffb2 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccessPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_ListAccessPolicies_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_list_access_policies(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.ListAccessPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_access_policies(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_ListAccessPolicies_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_sync.py new file mode 100644 index 000000000000..3503d195ae4a --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_list_access_policies_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the 
"License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListAccessPolicies +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_ListAccessPolicies_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_list_access_policies(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.ListAccessPoliciesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_access_policies(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_ListAccessPolicies_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_async.py new file mode 100644 index 000000000000..1e821ff12dbe --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_async.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAccessPolicyBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_search_access_policy_bindings(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.SearchAccessPolicyBindingsRequest( + name="name_value", + ) + + # Make the request + page_result = client.search_access_policy_bindings(request=request) + + # Handle the response + async for response in page_result: + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_sync.py new file mode 100644 index 000000000000..e8bf00c2a2b4 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_search_access_policy_bindings_sync.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for SearchAccessPolicyBindings +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_search_access_policy_bindings(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.SearchAccessPolicyBindingsRequest( + name="name_value", + ) + + # Make the request + page_result = client.search_access_policy_bindings(request=request) + + # Handle the response + for response in page_result: + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_async.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_async.py new file mode 100644 index 000000000000..8c275772f8f9 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_async.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +async def sample_update_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesAsyncClient() + + # Initialize request argument(s) + request = iam_v3beta.UpdateAccessPolicyRequest() + + # Make the request + operation = client.update_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = (await operation).result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_async] diff --git a/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_sync.py b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_sync.py new file mode 100644 index 000000000000..b800ea208979 --- /dev/null +++ b/packages/google-cloud-iam/samples/generated_samples/iam_v3beta_generated_access_policies_update_access_policy_sync.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for UpdateAccessPolicy +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-iam + + +# [START iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import iam_v3beta + + +def sample_update_access_policy(): + # Create a client + client = iam_v3beta.AccessPoliciesClient() + + # Initialize request argument(s) + request = iam_v3beta.UpdateAccessPolicyRequest() + + # Make the request + operation = client.update_access_policy(request=request) + + print("Waiting for operation to complete...") + + response = operation.result() + + # Handle the response + print(response) + + +# [END iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_sync] diff --git a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v3beta.json b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v3beta.json index f4646378cff5..b6b65400e146 100644 --- a/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v3beta.json +++ b/packages/google-cloud-iam/samples/generated_samples/snippet_metadata_google.iam.v3beta.json @@ -11,6 +11,980 @@ "version": "2.22.0" }, "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient.create_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.CreateAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "CreateAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.CreateAccessPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "access_policy", + "type": "google.cloud.iam_v3beta.types.AccessPolicy" + }, + { + "name": "access_policy_id", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "create_access_policy" + }, + "description": "Sample for CreateAccessPolicy", + "file": "iam_v3beta_generated_access_policies_create_access_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_async", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_create_access_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.create_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.CreateAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "CreateAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.CreateAccessPolicyRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "access_policy", + "type": "google.cloud.iam_v3beta.types.AccessPolicy" + }, + { + "name": "access_policy_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": 
"Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "create_access_policy" + }, + "description": "Sample for CreateAccessPolicy", + "file": "iam_v3beta_generated_access_policies_create_access_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_CreateAccessPolicy_sync", + "segments": [ + { + "end": 56, + "start": 27, + "type": "FULL" + }, + { + "end": 56, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 53, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 57, + "start": 54, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_create_access_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient.delete_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.DeleteAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "DeleteAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.DeleteAccessPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "delete_access_policy" + }, + "description": "Sample for DeleteAccessPolicy", + "file": 
"iam_v3beta_generated_access_policies_delete_access_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_delete_access_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.delete_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.DeleteAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "DeleteAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.DeleteAccessPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "delete_access_policy" + }, + "description": "Sample for DeleteAccessPolicy", + "file": "iam_v3beta_generated_access_policies_delete_access_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_DeleteAccessPolicy_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": 
"SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_delete_access_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient.get_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.GetAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "GetAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.GetAccessPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.types.AccessPolicy", + "shortName": "get_access_policy" + }, + "description": "Sample for GetAccessPolicy", + "file": "iam_v3beta_generated_access_policies_get_access_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_GetAccessPolicy_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + 
"title": "iam_v3beta_generated_access_policies_get_access_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.get_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.GetAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "GetAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.GetAccessPolicyRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.types.AccessPolicy", + "shortName": "get_access_policy" + }, + "description": "Sample for GetAccessPolicy", + "file": "iam_v3beta_generated_access_policies_get_access_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_GetAccessPolicy_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_get_access_policy_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": 
"google.cloud.iam_v3beta.AccessPoliciesAsyncClient.list_access_policies", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.ListAccessPolicies", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "ListAccessPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.ListAccessPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.services.access_policies.pagers.ListAccessPoliciesAsyncPager", + "shortName": "list_access_policies" + }, + "description": "Sample for ListAccessPolicies", + "file": "iam_v3beta_generated_access_policies_list_access_policies_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_ListAccessPolicies_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_list_access_policies_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.list_access_policies", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.ListAccessPolicies", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + 
"shortName": "ListAccessPolicies" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.ListAccessPoliciesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.services.access_policies.pagers.ListAccessPoliciesPager", + "shortName": "list_access_policies" + }, + "description": "Sample for ListAccessPolicies", + "file": "iam_v3beta_generated_access_policies_list_access_policies_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_ListAccessPolicies_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_list_access_policies_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient.search_access_policy_bindings", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.SearchAccessPolicyBindings", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "SearchAccessPolicyBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + 
"name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.services.access_policies.pagers.SearchAccessPolicyBindingsAsyncPager", + "shortName": "search_access_policy_bindings" + }, + "description": "Sample for SearchAccessPolicyBindings", + "file": "iam_v3beta_generated_access_policies_search_access_policy_bindings_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_search_access_policy_bindings_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.search_access_policy_bindings", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.SearchAccessPolicyBindings", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "SearchAccessPolicyBindings" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.SearchAccessPolicyBindingsRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, 
bytes]]]" + } + ], + "resultType": "google.cloud.iam_v3beta.services.access_policies.pagers.SearchAccessPolicyBindingsPager", + "shortName": "search_access_policy_bindings" + }, + "description": "Sample for SearchAccessPolicyBindings", + "file": "iam_v3beta_generated_access_policies_search_access_policy_bindings_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_SearchAccessPolicyBindings_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_search_access_policy_bindings_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient", + "shortName": "AccessPoliciesAsyncClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesAsyncClient.update_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.UpdateAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "UpdateAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.UpdateAccessPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation_async.AsyncOperation", + "shortName": "update_access_policy" + }, + "description": "Sample for UpdateAccessPolicy", + "file": 
"iam_v3beta_generated_access_policies_update_access_policy_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_async", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_update_access_policy_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient", + "shortName": "AccessPoliciesClient" + }, + "fullName": "google.cloud.iam_v3beta.AccessPoliciesClient.update_access_policy", + "method": { + "fullName": "google.iam.v3beta.AccessPolicies.UpdateAccessPolicy", + "service": { + "fullName": "google.iam.v3beta.AccessPolicies", + "shortName": "AccessPolicies" + }, + "shortName": "UpdateAccessPolicy" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.iam_v3beta.types.UpdateAccessPolicyRequest" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "google.api_core.operation.Operation", + "shortName": "update_access_policy" + }, + "description": "Sample for UpdateAccessPolicy", + "file": "iam_v3beta_generated_access_policies_update_access_policy_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "iam_v3beta_generated_AccessPolicies_UpdateAccessPolicy_sync", + "segments": [ + { + "end": 54, + "start": 27, + "type": "FULL" + }, + { + "end": 54, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 
38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 44, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 51, + "start": 45, + "type": "REQUEST_EXECUTION" + }, + { + "end": 55, + "start": 52, + "type": "RESPONSE_HANDLING" + } + ], + "title": "iam_v3beta_generated_access_policies_update_access_policy_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_access_policies.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_access_policies.py new file mode 100644 index 000000000000..0c441be68ddc --- /dev/null +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_access_policies.py @@ -0,0 +1,7620 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os + +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import json +import math +from collections.abc import AsyncIterable, Iterable, Mapping, Sequence + +import grpc +import pytest +from google.api_core import api_core_version +from google.protobuf import json_format +from grpc.experimental import aio +from proto.marshal.rules import wrappers +from proto.marshal.rules.dates import DurationRule, TimestampRule +from requests import PreparedRequest, Request, Response +from requests.sessions import Session + +try: + from google.auth.aio import credentials as ga_credentials_async + + HAS_GOOGLE_AUTH_AIO = True +except ImportError: # pragma: NO COVER + HAS_GOOGLE_AUTH_AIO = False + +import google.api_core.operation_async as operation_async # type: ignore +import google.auth +import google.protobuf.empty_pb2 as empty_pb2 # type: ignore +import google.protobuf.timestamp_pb2 as timestamp_pb2 # type: ignore +import google.type.expr_pb2 as expr_pb2 # type: ignore +from google.api_core import ( + client_options, + future, + gapic_v1, + grpc_helpers, + grpc_helpers_async, + operation, + operations_v1, + path_template, +) +from google.api_core import exceptions as core_exceptions +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.location import locations_pb2 +from google.longrunning import operations_pb2 # type: ignore +from google.oauth2 import service_account + +from google.cloud.iam_v3beta.services.access_policies import ( + AccessPoliciesAsyncClient, + AccessPoliciesClient, + pagers, + transports, +) +from google.cloud.iam_v3beta.types import ( + access_policies_service, + access_policy_resources, + operation_metadata, + policy_binding_resources, +) + +CRED_INFO_JSON = { + 
"credential_source": "/path/to/file", + "credential_type": "service account credentials", + "principal": "service-account@example.com", +} +CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) + + +async def mock_async_gen(data, chunk_size=1): + for i in range(0, len(data)): # pragma: NO COVER + chunk = data[i : i + chunk_size] + yield chunk.encode("utf-8") + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# TODO: use async auth anon credentials by default once the minimum version of google-auth is upgraded. +# See related issue: https://github.com/googleapis/gapic-generator-python/issues/2107. +def async_anonymous_credentials(): + if HAS_GOOGLE_AUTH_AIO: + return ga_credentials_async.AnonymousCredentials() + return ga_credentials.AnonymousCredentials() + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return ( + "foo.googleapis.com" + if ("localhost" in client.DEFAULT_ENDPOINT) + else client.DEFAULT_ENDPOINT + ) + + +# If default endpoint template is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint template so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
+def modify_default_endpoint_template(client): + return ( + "test.{UNIVERSE_DOMAIN}" + if ("localhost" in client._DEFAULT_ENDPOINT_TEMPLATE) + else client._DEFAULT_ENDPOINT_TEMPLATE + ) + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + custom_endpoint = ".custom" + + assert AccessPoliciesClient._get_default_mtls_endpoint(None) is None + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(api_endpoint) + == api_mtls_endpoint + ) + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(api_mtls_endpoint) + == api_mtls_endpoint + ) + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(sandbox_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) + == sandbox_mtls_endpoint + ) + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + ) + assert ( + AccessPoliciesClient._get_default_mtls_endpoint(custom_endpoint) + == custom_endpoint + ) + + +def test__read_environment_variables(): + assert AccessPoliciesClient._read_environment_variables() == (False, "auto", None) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AccessPoliciesClient._read_environment_variables() == ( + True, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with pytest.raises(ValueError) as excinfo: + AccessPoliciesClient._read_environment_variables() + assert ( + 
str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + else: + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "never", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "always", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}): + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "auto", + None, + ) + + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + AccessPoliciesClient._read_environment_variables() + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + with mock.patch.dict(os.environ, {"GOOGLE_CLOUD_UNIVERSE_DOMAIN": "foo.com"}): + assert AccessPoliciesClient._read_environment_variables() == ( + False, + "auto", + "foo.com", + ) + + +def test_use_client_cert_effective(): + # Test case 1: Test when `should_use_client_cert` returns True. + # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=True + ): + assert AccessPoliciesClient._use_client_cert_effective() is True + + # Test case 2: Test when `should_use_client_cert` returns False. 
+ # We mock the `should_use_client_cert` function to simulate a scenario where + # the google-auth library supports automatic mTLS and determines that a + # client certificate should NOT be used. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch( + "google.auth.transport.mtls.should_use_client_cert", return_value=False + ): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 3: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "true". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + assert AccessPoliciesClient._use_client_cert_effective() is True + + # Test case 4: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "false". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"} + ): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 5: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "True". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "True"}): + assert AccessPoliciesClient._use_client_cert_effective() is True + + # Test case 6: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "False". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "False"} + ): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 7: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "TRUE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "TRUE"}): + assert AccessPoliciesClient._use_client_cert_effective() is True + + # Test case 8: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to "FALSE". + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "FALSE"} + ): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 9: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not set. + # In this case, the method should return False, which is the default value. + if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, clear=True): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 10: Test when `should_use_client_cert` is unavailable and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should raise a ValueError as the environment variable must be either + # "true" or "false". 
+ if not hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + with pytest.raises(ValueError): + AccessPoliciesClient._use_client_cert_effective() + + # Test case 11: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is set to an invalid value. + # The method should return False as the environment variable is set to an invalid value. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "unsupported"} + ): + assert AccessPoliciesClient._use_client_cert_effective() is False + + # Test case 12: Test when `should_use_client_cert` is available and the + # `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is unset. Also, + # the GOOGLE_API_CONFIG environment variable is unset. + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": ""}): + with mock.patch.dict(os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": ""}): + assert AccessPoliciesClient._use_client_cert_effective() is False + + +def test__get_client_cert_source(): + mock_provided_cert_source = mock.Mock() + mock_default_cert_source = mock.Mock() + + assert AccessPoliciesClient._get_client_cert_source(None, False) is None + assert ( + AccessPoliciesClient._get_client_cert_source(mock_provided_cert_source, False) + is None + ) + assert ( + AccessPoliciesClient._get_client_cert_source(mock_provided_cert_source, True) + == mock_provided_cert_source + ) + + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", return_value=True + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_default_cert_source, + ): + assert ( + AccessPoliciesClient._get_client_cert_source(None, True) + is 
mock_default_cert_source + ) + assert ( + AccessPoliciesClient._get_client_cert_source( + mock_provided_cert_source, "true" + ) + is mock_provided_cert_source + ) + + +@mock.patch.object( + AccessPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesClient), +) +@mock.patch.object( + AccessPoliciesAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesAsyncClient), +) +def test__get_api_endpoint(): + api_override = "foo.com" + mock_client_cert_source = mock.Mock() + default_universe = AccessPoliciesClient._DEFAULT_UNIVERSE + default_endpoint = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + assert ( + AccessPoliciesClient._get_api_endpoint( + api_override, mock_client_cert_source, default_universe, "always" + ) + == api_override + ) + assert ( + AccessPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "auto" + ) + == AccessPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccessPoliciesClient._get_api_endpoint(None, None, default_universe, "auto") + == default_endpoint + ) + assert ( + AccessPoliciesClient._get_api_endpoint(None, None, default_universe, "always") + == AccessPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccessPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, default_universe, "always" + ) + == AccessPoliciesClient.DEFAULT_MTLS_ENDPOINT + ) + assert ( + AccessPoliciesClient._get_api_endpoint(None, None, mock_universe, "never") + == mock_endpoint + ) + assert ( + AccessPoliciesClient._get_api_endpoint(None, None, default_universe, "never") + == default_endpoint + ) + + with pytest.raises(MutualTLSChannelError) as excinfo: + AccessPoliciesClient._get_api_endpoint( + None, mock_client_cert_source, mock_universe, "auto" + ) + 
assert ( + str(excinfo.value) + == "mTLS is not supported in any universe other than googleapis.com." + ) + + +def test__get_universe_domain(): + client_universe_domain = "foo.com" + universe_domain_env = "bar.com" + + assert ( + AccessPoliciesClient._get_universe_domain( + client_universe_domain, universe_domain_env + ) + == client_universe_domain + ) + assert ( + AccessPoliciesClient._get_universe_domain(None, universe_domain_env) + == universe_domain_env + ) + assert ( + AccessPoliciesClient._get_universe_domain(None, None) + == AccessPoliciesClient._DEFAULT_UNIVERSE + ) + + with pytest.raises(ValueError) as excinfo: + AccessPoliciesClient._get_universe_domain("", None) + assert str(excinfo.value) == "Universe Domain cannot be an empty string." + + +@pytest.mark.parametrize( + "error_code,cred_info_json,show_cred_info", + [ + (401, CRED_INFO_JSON, True), + (403, CRED_INFO_JSON, True), + (404, CRED_INFO_JSON, True), + (500, CRED_INFO_JSON, False), + (401, None, False), + (403, None, False), + (404, None, False), + (500, None, False), + ], +) +def test__add_cred_info_for_auth_errors(error_code, cred_info_json, show_cred_info): + cred = mock.Mock(["get_cred_info"]) + cred.get_cred_info = mock.Mock(return_value=cred_info_json) + client = AccessPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=["foo"]) + error.code = error_code + + client._add_cred_info_for_auth_errors(error) + if show_cred_info: + assert error.details == ["foo", CRED_INFO_STRING] + else: + assert error.details == ["foo"] + + +@pytest.mark.parametrize("error_code", [401, 403, 404, 500]) +def test__add_cred_info_for_auth_errors_no_get_cred_info(error_code): + cred = mock.Mock([]) + assert not hasattr(cred, "get_cred_info") + client = AccessPoliciesClient(credentials=cred) + client._transport._credentials = cred + + error = core_exceptions.GoogleAPICallError("message", details=[]) + error.code = error_code + + 
client._add_cred_info_for_auth_errors(error) + assert error.details == [] + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccessPoliciesClient, "grpc"), + (AccessPoliciesAsyncClient, "grpc_asyncio"), + (AccessPoliciesClient, "rest"), + ], +) +def test_access_policies_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_info" + ) as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iam.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iam.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_class,transport_name", + [ + (transports.AccessPoliciesGrpcTransport, "grpc"), + (transports.AccessPoliciesGrpcAsyncIOTransport, "grpc_asyncio"), + (transports.AccessPoliciesRestTransport, "rest"), + ], +) +def test_access_policies_client_service_account_always_use_jwt( + transport_class, transport_name +): + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object( + service_account.Credentials, "with_always_use_jwt_access", create=True + ) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize( + "client_class,transport_name", + [ + (AccessPoliciesClient, "grpc"), + (AccessPoliciesAsyncClient, "grpc_asyncio"), + (AccessPoliciesClient, "rest"), + ], +) 
+def test_access_policies_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object( + service_account.Credentials, "from_service_account_file" + ) as factory: + factory.return_value = creds + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + "iam.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iam.googleapis.com" + ) + + +def test_access_policies_client_get_transport_class(): + transport = AccessPoliciesClient.get_transport_class() + available_transports = [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesRestTransport, + ] + assert transport in available_transports + + transport = AccessPoliciesClient.get_transport_class("grpc") + assert transport == transports.AccessPoliciesGrpcTransport + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccessPoliciesClient, transports.AccessPoliciesGrpcTransport, "grpc"), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccessPoliciesClient, transports.AccessPoliciesRestTransport, "rest"), + ], +) +@mock.patch.object( + AccessPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesClient), +) +@mock.patch.object( + AccessPoliciesAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesAsyncClient), +) +def test_access_policies_client_client_options( + client_class, transport_class, transport_name +): + # Check that if channel is provided we won't 
create a new one. + with mock.patch.object(AccessPoliciesClient, "get_transport_class") as gtc: + transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(AccessPoliciesClient, "get_transport_class") as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client = client_class(transport=transport_name) + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions( + api_audience="https://language.googleapis.com" + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, 
+ host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com", + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,use_client_cert_env", + [ + (AccessPoliciesClient, transports.AccessPoliciesGrpcTransport, "grpc", "true"), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + "true", + ), + (AccessPoliciesClient, transports.AccessPoliciesGrpcTransport, "grpc", "false"), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + "false", + ), + (AccessPoliciesClient, transports.AccessPoliciesRestTransport, "rest", "true"), + (AccessPoliciesClient, transports.AccessPoliciesRestTransport, "rest", "false"), + ], +) +@mock.patch.object( + AccessPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesClient), +) +@mock.patch.object( + AccessPoliciesAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesAsyncClient), +) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_access_policies_client_mtls_env_auto( + client_class, transport_class, transport_name, use_client_cert_env +): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + options = client_options.ClientOptions( + client_cert_source=client_cert_source_callback + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=client_cert_source_callback, + ): + if use_client_cert_env == "false": + expected_host = client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ) + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. 
+ with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} + ): + with mock.patch.object(transport_class, "__init__") as patched: + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class", [AccessPoliciesClient, AccessPoliciesAsyncClient] +) +@mock.patch.object( + AccessPoliciesClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccessPoliciesClient), +) +@mock.patch.object( + AccessPoliciesAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(AccessPoliciesAsyncClient), +) +def test_access_policies_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "Unsupported". + with mock.patch.dict( + os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} + ): + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset. + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", None) + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test cases for mTLS enablement when GOOGLE_API_USE_CLIENT_CERTIFICATE is unset(empty). + test_cases = [ + ( + # With workloads present in config, mTLS is enabled. + { + "version": 1, + "cert_configs": { + "workload": { + "cert_path": "path/to/cert/file", + "key_path": "path/to/key/file", + } + }, + }, + mock_client_cert_source, + ), + ( + # With workloads not present in config, mTLS is disabled. 
+ { + "version": 1, + "cert_configs": {}, + }, + None, + ), + ] + if hasattr(google.auth.transport.mtls, "should_use_client_cert"): + for config_data, expected_cert_source in test_cases: + env = os.environ.copy() + env.pop("GOOGLE_API_USE_CLIENT_CERTIFICATE", "") + with mock.patch.dict(os.environ, env, clear=True): + config_filename = "mock_certificate_config.json" + config_file_content = json.dumps(config_data) + m = mock.mock_open(read_data=config_file_content) + with mock.patch("builtins.open", m): + with mock.patch.dict( + os.environ, {"GOOGLE_API_CERTIFICATE_CONFIG": config_filename} + ): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, + api_endpoint=mock_api_endpoint, + ) + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source(options) + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is expected_cert_source + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + api_endpoint, cert_source = ( + client_class.get_mtls_endpoint_and_cert_source() + ) + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError) as excinfo: + client_class.get_mtls_endpoint_and_cert_source() + + assert ( + str(excinfo.value) + == "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + +@pytest.mark.parametrize( + "client_class", [AccessPoliciesClient, AccessPoliciesAsyncClient] +) +@mock.patch.object( + AccessPoliciesClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesClient), +) +@mock.patch.object( + AccessPoliciesAsyncClient, + "_DEFAULT_ENDPOINT_TEMPLATE", + modify_default_endpoint_template(AccessPoliciesAsyncClient), +) +def test_access_policies_client_client_api_endpoint(client_class): + mock_client_cert_source = client_cert_source_callback + api_override = "foo.com" + default_universe = AccessPoliciesClient._DEFAULT_UNIVERSE + default_endpoint = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=default_universe + ) + mock_universe = "bar.com" + mock_endpoint = AccessPoliciesClient._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=mock_universe + ) + + # If ClientOptions.api_endpoint is set and GOOGLE_API_USE_CLIENT_CERTIFICATE="true", + # use ClientOptions.api_endpoint as the api endpoint regardless. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ): + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=api_override + ) + client = client_class( + client_options=options, + credentials=ga_credentials.AnonymousCredentials(), + ) + assert client.api_endpoint == api_override + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == default_endpoint + + # If ClientOptions.api_endpoint is not set and GOOGLE_API_USE_MTLS_ENDPOINT="always", + # use the DEFAULT_MTLS_ENDPOINT as the api endpoint. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + client = client_class(credentials=ga_credentials.AnonymousCredentials()) + assert client.api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + + # If ClientOptions.api_endpoint is not set, GOOGLE_API_USE_MTLS_ENDPOINT="auto" (default), + # GOOGLE_API_USE_CLIENT_CERTIFICATE="false" (default), default cert source doesn't exist, + # and ClientOptions.universe_domain="bar.com", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with universe domain as the api endpoint. + options = client_options.ClientOptions() + universe_exists = hasattr(options, "universe_domain") + if universe_exists: + options = client_options.ClientOptions(universe_domain=mock_universe) + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + else: + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == ( + mock_endpoint if universe_exists else default_endpoint + ) + assert client.universe_domain == ( + mock_universe if universe_exists else default_universe + ) + + # If ClientOptions does not have a universe domain attribute and GOOGLE_API_USE_MTLS_ENDPOINT="never", + # use the _DEFAULT_ENDPOINT_TEMPLATE populated with GDU as the api endpoint. 
+ options = client_options.ClientOptions() + if hasattr(options, "universe_domain"): + delattr(options, "universe_domain") + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + client = client_class( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + assert client.api_endpoint == default_endpoint + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name", + [ + (AccessPoliciesClient, transports.AccessPoliciesGrpcTransport, "grpc"), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + ), + (AccessPoliciesClient, transports.AccessPoliciesRestTransport, "rest"), + ], +) +def test_access_policies_client_client_options_scopes( + client_class, transport_class, transport_name +): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccessPoliciesClient, + transports.AccessPoliciesGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + (AccessPoliciesClient, transports.AccessPoliciesRestTransport, "rest", None), + ], +) +def test_access_policies_client_client_options_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # 
Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +def test_access_policies_client_client_options_from_dict(): + with mock.patch( + "google.cloud.iam_v3beta.services.access_policies.transports.AccessPoliciesGrpcTransport.__init__" + ) as grpc_transport: + grpc_transport.return_value = None + client = AccessPoliciesClient( + client_options={"api_endpoint": "squid.clam.whelk"} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + AccessPoliciesClient, + transports.AccessPoliciesGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + AccessPoliciesAsyncClient, + transports.AccessPoliciesGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_access_policies_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object(grpc_helpers, "create_channel") as create_channel, + ): + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "iam.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="iam.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.CreateAccessPolicyRequest, + dict, + ], +) +def test_create_access_policy(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are 
mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.CreateAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_create_access_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.create_access_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.CreateAccessPolicyRequest( + parent="parent_value", + access_policy_id="access_policy_id_value", + ) + + +def test_create_access_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.create_access_policy] = ( + mock_rpc + ) + request = {} + client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_access_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.create_access_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.create_access_policy + ] = mock_rpc + + request = {} + await client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.create_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_create_access_policy_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.CreateAccessPolicyRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.CreateAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_create_access_policy_async_from_dict(): + await test_create_access_policy_async(request_type=dict) + + +def test_create_access_policy_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = access_policies_service.CreateAccessPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_create_access_policy_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.CreateAccessPolicyRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_create_access_policy_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_access_policy( + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].access_policy + mock_val = access_policy_resources.AccessPolicy(name="name_value") + assert arg == mock_val + arg = args[0].access_policy_id + mock_val = "access_policy_id_value" + assert arg == mock_val + + +def test_create_access_policy_flattened_error(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_access_policy( + access_policies_service.CreateAccessPolicyRequest(), + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + + +@pytest.mark.asyncio +async def test_create_access_policy_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_access_policy( + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].access_policy + mock_val = access_policy_resources.AccessPolicy(name="name_value") + assert arg == mock_val + arg = args[0].access_policy_id + mock_val = "access_policy_id_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_create_access_policy_flattened_error_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_access_policy( + access_policies_service.CreateAccessPolicyRequest(), + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.GetAccessPolicyRequest, + dict, + ], +) +def test_get_access_policy(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policy_resources.AccessPolicy( + name="name_value", + uid="uid_value", + etag="etag_value", + display_name="display_name_value", + ) + response = client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.GetAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, access_policy_resources.AccessPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.etag == "etag_value" + assert response.display_name == "display_name_value" + + +def test_get_access_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.GetAccessPolicyRequest( + name="name_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.get_access_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.GetAccessPolicyRequest( + name="name_value", + ) + + +def test_get_access_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_access_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_access_policy] = ( + mock_rpc + ) + request = {} + client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_access_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.get_access_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.get_access_policy + ] = mock_rpc + + request = {} + await client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.get_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_get_access_policy_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.GetAccessPolicyRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policy_resources.AccessPolicy( + name="name_value", + uid="uid_value", + etag="etag_value", + display_name="display_name_value", + ) + ) + response = await client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.GetAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, access_policy_resources.AccessPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.etag == "etag_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.asyncio +async def test_get_access_policy_async_from_dict(): + await test_get_access_policy_async(request_type=dict) + + +def test_get_access_policy_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.GetAccessPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + call.return_value = access_policy_resources.AccessPolicy() + client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_access_policy_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.GetAccessPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policy_resources.AccessPolicy() + ) + await client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_get_access_policy_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policy_resources.AccessPolicy() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_access_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_get_access_policy_flattened_error(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_access_policy( + access_policies_service.GetAccessPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_get_access_policy_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policy_resources.AccessPolicy() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policy_resources.AccessPolicy() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_access_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_get_access_policy_flattened_error_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_access_policy( + access_policies_service.GetAccessPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.UpdateAccessPolicyRequest, + dict, + ], +) +def test_update_access_policy(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.UpdateAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_update_access_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.UpdateAccessPolicyRequest() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.update_access_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.UpdateAccessPolicyRequest() + + +def test_update_access_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_access_policy] = ( + mock_rpc + ) + request = {} + client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_access_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.update_access_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.update_access_policy + ] = mock_rpc + + request = {} + await client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.update_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_update_access_policy_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.UpdateAccessPolicyRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.UpdateAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_update_access_policy_async_from_dict(): + await test_update_access_policy_async(request_type=dict) + + +def test_update_access_policy_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = access_policies_service.UpdateAccessPolicyRequest() + + request.access_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "access_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_update_access_policy_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.UpdateAccessPolicyRequest() + + request.access_policy.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "access_policy.name=name_value", + ) in kw["metadata"] + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.DeleteAccessPolicyRequest, + dict, + ], +) +def test_delete_access_policy(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/spam") + response = client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.DeleteAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +def test_delete_access_policy_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.DeleteAccessPolicyRequest( + name="name_value", + etag="etag_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client.delete_access_policy(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.DeleteAccessPolicyRequest( + name="name_value", + etag="etag_value", + ) + + +def test_delete_access_policy_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.delete_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_access_policy] = ( + mock_rpc + ) + request = {} + client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_access_policy_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.delete_access_policy + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.delete_access_policy + ] = mock_rpc + + request = {} + await client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods call wrapper_fn to build a cached + # client._transport.operations_client instance on first rpc call. 
+ # Subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + await client.delete_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_delete_access_policy_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.DeleteAccessPolicyRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + response = await client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.DeleteAccessPolicyRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, future.Future) + + +@pytest.mark.asyncio +async def test_delete_access_policy_async_from_dict(): + await test_delete_access_policy_async(request_type=dict) + + +def test_delete_access_policy_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = access_policies_service.DeleteAccessPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_delete_access_policy_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.DeleteAccessPolicyRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/op") + ) + await client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_delete_access_policy_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_access_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_delete_access_policy_flattened_error(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_access_policy( + access_policies_service.DeleteAccessPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_delete_access_policy_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation(name="operations/op") + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_access_policy( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_delete_access_policy_flattened_error_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_access_policy( + access_policies_service.DeleteAccessPolicyRequest(), + name="name_value", + ) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.ListAccessPoliciesRequest, + dict, + ], +) +def test_list_access_policies(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policies_service.ListAccessPoliciesResponse( + next_page_token="next_page_token_value", + ) + response = client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.ListAccessPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAccessPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +def test_list_access_policies_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.ListAccessPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.list_access_policies(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.ListAccessPoliciesRequest( + parent="parent_value", + page_token="page_token_value", + ) + + +def test_list_access_policies_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_access_policies in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_access_policies] = ( + mock_rpc + ) + request = {} + client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_access_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_access_policies_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.list_access_policies + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.list_access_policies + ] = mock_rpc + + request = {} + await client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.list_access_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_list_access_policies_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.ListAccessPoliciesRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.ListAccessPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.ListAccessPoliciesRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListAccessPoliciesAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_list_access_policies_async_from_dict(): + await test_list_access_policies_async(request_type=dict) + + +def test_list_access_policies_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.ListAccessPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + call.return_value = access_policies_service.ListAccessPoliciesResponse() + client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_list_access_policies_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.ListAccessPoliciesRequest() + + request.parent = "parent_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.ListAccessPoliciesResponse() + ) + await client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] + + +def test_list_access_policies_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policies_service.ListAccessPoliciesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_access_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +def test_list_access_policies_flattened_error(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_access_policies( + access_policies_service.ListAccessPoliciesRequest(), + parent="parent_value", + ) + + +@pytest.mark.asyncio +async def test_list_access_policies_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policies_service.ListAccessPoliciesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.ListAccessPoliciesResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_access_policies( + parent="parent_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_list_access_policies_flattened_error_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_access_policies( + access_policies_service.ListAccessPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_access_policies_pager(transport_name: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + next_page_token="abc", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[], + next_page_token="def", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + ], + next_page_token="ghi", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), + ) + pager = client.list_access_policies(request={}, retry=retry, timeout=timeout) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, access_policy_resources.AccessPolicy) for i in results) + + +def test_list_access_policies_pages(transport_name: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within 
the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + next_page_token="abc", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[], + next_page_token="def", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + ], + next_page_token="ghi", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + ), + RuntimeError, + ) + pages = list(client.list_access_policies(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_list_access_policies_async_pager(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + next_page_token="abc", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[], + next_page_token="def", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + ], + next_page_token="ghi", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_access_policies( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, access_policy_resources.AccessPolicy) for i in responses + ) + + +@pytest.mark.asyncio +async def test_list_access_policies_async_pages(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + next_page_token="abc", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[], + next_page_token="def", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + ], + next_page_token="ghi", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.list_access_policies(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.SearchAccessPolicyBindingsRequest, + dict, + ], +) +def test_search_access_policy_bindings(request_type, transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. 
+ call.return_value = access_policies_service.SearchAccessPolicyBindingsResponse( + next_page_token="next_page_token_value", + ) + response = client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + request = access_policies_service.SearchAccessPolicyBindingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAccessPolicyBindingsPager) + assert response.next_page_token == "next_page_token_value" + + +def test_search_access_policy_bindings_non_empty_request_with_auto_populated_field(): + # This test is a coverage failsafe to make sure that UUID4 fields are + # automatically populated, according to AIP-4235, with non-empty requests. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Populate all string fields in the request which are not UUID4 + # since we want to check that UUID4 are populated automatically + # if they meet the requirements of AIP 4235. + request = access_policies_service.SearchAccessPolicyBindingsRequest( + name="name_value", + page_token="page_token_value", + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + call.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client.search_access_policy_bindings(request=request) + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == access_policies_service.SearchAccessPolicyBindingsRequest( + name="name_value", + page_token="page_token_value", + ) + + +def test_search_access_policy_bindings_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.search_access_policy_bindings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_access_policy_bindings + ] = mock_rpc + request = {} + client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.search_access_policy_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.search_access_policy_bindings + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.search_access_policy_bindings + ] = mock_rpc + + request = {} + await client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.search_access_policy_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_async( + transport: str = "grpc_asyncio", + request_type=access_policies_service.SearchAccessPolicyBindingsRequest, +): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.SearchAccessPolicyBindingsResponse( + next_page_token="next_page_token_value", + ) + ) + response = await client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + request = access_policies_service.SearchAccessPolicyBindingsRequest() + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAccessPolicyBindingsAsyncPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_async_from_dict(): + await test_search_access_policy_bindings_async(request_type=dict) + + +def test_search_access_policy_bindings_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.SearchAccessPolicyBindingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + call.return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = access_policies_service.SearchAccessPolicyBindingsRequest() + + request.name = "name_value" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.SearchAccessPolicyBindingsResponse() + ) + await client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] + + +def test_search_access_policy_bindings_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.search_access_policy_bindings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +def test_search_access_policy_bindings_flattened_error(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_access_policy_bindings( + access_policies_service.SearchAccessPolicyBindingsRequest(), + name="name_value", + ) + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.SearchAccessPolicyBindingsResponse() + ) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.search_access_policy_bindings( + name="name_value", + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_flattened_error_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.search_access_policy_bindings( + access_policies_service.SearchAccessPolicyBindingsRequest(), + name="name_value", + ) + + +def test_search_access_policy_bindings_pager(transport_name: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + next_page_token="abc", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[], + next_page_token="def", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + ], + next_page_token="ghi", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + ), + RuntimeError, + ) + + expected_metadata = () + retry = retries.Retry() + timeout = 5 + expected_metadata = tuple(expected_metadata) + ( + gapic_v1.routing_header.to_grpc_metadata((("name", ""),)), + ) + pager = client.search_access_policy_bindings( + request={}, retry=retry, timeout=timeout + ) + + assert pager._metadata == expected_metadata + assert pager._retry == retry + assert pager._timeout == timeout + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, policy_binding_resources.PolicyBinding) for i in results + ) + + +def test_search_access_policy_bindings_pages(transport_name: str = "grpc"): + client = AccessPoliciesClient( + 
credentials=ga_credentials.AnonymousCredentials(), + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Set the response to a series of pages. + call.side_effect = ( + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + next_page_token="abc", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[], + next_page_token="def", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + ], + next_page_token="ghi", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + ), + RuntimeError, + ) + pages = list(client.search_access_policy_bindings(request={}).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_async_pager(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + next_page_token="abc", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[], + next_page_token="def", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + ], + next_page_token="ghi", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + ), + RuntimeError, + ) + async_pager = await client.search_access_policy_bindings( + request={}, + ) + assert async_pager.next_page_token == "abc" + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all( + isinstance(i, policy_binding_resources.PolicyBinding) for i in responses + ) + + +@pytest.mark.asyncio +async def test_search_access_policy_bindings_async_pages(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), + "__call__", + new_callable=mock.AsyncMock, + ) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + next_page_token="abc", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[], + next_page_token="def", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + ], + next_page_token="ghi", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + ), + RuntimeError, + ) + pages = [] + # Workaround issue in python 3.9 related to code coverage by adding `# pragma: no branch` + # See https://github.com/googleapis/gapic-generator-python/pull/1174#issuecomment-1025132372 + async for page_ in ( # pragma: no branch + await client.search_access_policy_bindings(request={}) + ).pages: + pages.append(page_) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_create_access_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.create_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. 
+ ) + client._transport._wrapped_methods[client._transport.create_access_policy] = ( + mock_rpc + ) + + request = {} + client.create_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.create_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_create_access_policy_rest_required_fields( + request_type=access_policies_service.CreateAccessPolicyRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request_init["parent"] = "" + request_init["access_policy_id"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + assert "accessPolicyId" not in jsonified_request + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_access_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + assert "accessPolicyId" in jsonified_request + assert jsonified_request["accessPolicyId"] == request_init["access_policy_id"] + + jsonified_request["parent"] = "parent_value" + jsonified_request["accessPolicyId"] = "access_policy_id_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).create_access_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "access_policy_id", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + assert "accessPolicyId" in jsonified_request + assert jsonified_request["accessPolicyId"] == "access_policy_id_value" + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "post", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.create_access_policy(request) + + expected_params = [ + ( + "accessPolicyId", + "", + ), + ("$alt", "json;enum-encoding=int"), + ] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_create_access_policy_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.create_access_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "accessPolicyId", + "validateOnly", + ) + ) + & set( + ( + "parent", + "accessPolicyId", + "accessPolicy", + ) + ) + ) + + +def test_create_access_policy_rest_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.create_access_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3beta/{parent=projects/*/locations/*}/accessPolicies" + % client.transport._host, + args[1], + ) + + +def test_create_access_policy_rest_flattened_error(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.create_access_policy( + access_policies_service.CreateAccessPolicyRequest(), + parent="parent_value", + access_policy=access_policy_resources.AccessPolicy(name="name_value"), + access_policy_id="access_policy_id_value", + ) + + +def test_get_access_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert client._transport.get_access_policy in client._transport._wrapped_methods + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.get_access_policy] = ( + mock_rpc + ) + + request = {} + client.get_access_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.get_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_get_access_policy_rest_required_fields( + request_type=access_policies_service.GetAccessPolicyRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_access_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).get_access_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = access_policy_resources.AccessPolicy() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. 
+ with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = access_policy_resources.AccessPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_access_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_get_access_policy_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.get_access_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(()) & set(("name",))) + + +def test_get_access_policy_rest_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = access_policy_resources.AccessPolicy() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/accessPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = access_policy_resources.AccessPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.get_access_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3beta/{name=projects/*/locations/*/accessPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_get_access_policy_rest_flattened_error(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_access_policy( + access_policies_service.GetAccessPolicyRequest(), + name="name_value", + ) + + +def test_update_access_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.update_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.update_access_policy] = ( + mock_rpc + ) + + request = {} + client.update_access_policy(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.update_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_update_access_policy_rest_required_fields( + request_type=access_policies_service.UpdateAccessPolicyRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_access_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).update_access_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set(("validate_only",)) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "patch", + "query_params": pb_request, + } + transcode_result["body"] = pb_request + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.update_access_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_update_access_policy_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.update_access_policy._get_unset_required_fields({}) + assert set(unset_fields) == (set(("validateOnly",)) & set(("accessPolicy",))) + + +def test_delete_access_policy_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been 
cached + assert ( + client._transport.delete_access_policy in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.delete_access_policy] = ( + mock_rpc + ) + + request = {} + client.delete_access_policy(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + # Operation methods build a cached wrapper on first rpc call + # subsequent calls should use the cached wrapper + wrapper_fn.reset_mock() + + client.delete_access_policy(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_delete_access_policy_rest_required_fields( + request_type=access_policies_service.DeleteAccessPolicyRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_access_policy._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).delete_access_policy._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. 
+ assert not set(unset_fields) - set( + ( + "etag", + "force", + "validate_only", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "delete", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.delete_access_policy(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_delete_access_policy_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.delete_access_policy._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "etag", + "force", + "validateOnly", + ) + ) + & set(("name",)) + ) + + +def test_delete_access_policy_rest_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "projects/sample1/locations/sample2/accessPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.delete_access_policy(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3beta/{name=projects/*/locations/*/accessPolicies/*}" + % client.transport._host, + args[1], + ) + + +def test_delete_access_policy_rest_flattened_error(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.delete_access_policy( + access_policies_service.DeleteAccessPolicyRequest(), + name="name_value", + ) + + +def test_list_access_policies_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.list_access_policies in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[client._transport.list_access_policies] = ( + mock_rpc + ) + + request = {} + client.list_access_policies(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.list_access_policies(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_list_access_policies_rest_required_fields( + request_type=access_policies_service.ListAccessPoliciesRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request_init["parent"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_access_policies._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["parent"] = "parent_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).list_access_policies._get_unset_required_fields(jsonified_request) + # Check that path parameters and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "parent" in jsonified_request + assert jsonified_request["parent"] == "parent_value" + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = access_policies_service.ListAccessPoliciesResponse() + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. + pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = access_policies_service.ListAccessPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.list_access_policies(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_list_access_policies_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.list_access_policies._get_unset_required_fields({}) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("parent",)) + ) + + +def test_list_access_policies_rest_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = access_policies_service.ListAccessPoliciesResponse() + + # get arguments that satisfy an http rule for this method + sample_request = {"parent": "projects/sample1/locations/sample2"} + + # get truthy value for each flattened field + mock_args = dict( + parent="parent_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = access_policies_service.ListAccessPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.list_access_policies(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3beta/{parent=projects/*/locations/*}/accessPolicies" + % client.transport._host, + args[1], + ) + + +def test_list_access_policies_rest_flattened_error(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_access_policies( + access_policies_service.ListAccessPoliciesRequest(), + parent="parent_value", + ) + + +def test_list_access_policies_rest_pager(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. 
+ # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + next_page_token="abc", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[], + next_page_token="def", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + ], + next_page_token="ghi", + ), + access_policies_service.ListAccessPoliciesResponse( + access_policies=[ + access_policy_resources.AccessPolicy(), + access_policy_resources.AccessPolicy(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + access_policies_service.ListAccessPoliciesResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = {"parent": "projects/sample1/locations/sample2"} + + pager = client.list_access_policies(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, access_policy_resources.AccessPolicy) for i in results) + + pages = list(client.list_access_policies(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_search_access_policy_bindings_rest_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = 
AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.search_access_policy_bindings + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.search_access_policy_bindings + ] = mock_rpc + + request = {} + client.search_access_policy_bindings(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + client.search_access_policy_bindings(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +def test_search_access_policy_bindings_rest_required_fields( + request_type=access_policies_service.SearchAccessPolicyBindingsRequest, +): + transport_class = transports.AccessPoliciesRestTransport + + request_init = {} + request_init["name"] = "" + request = request_type(**request_init) + pb_request = request_type.pb(request) + jsonified_request = json.loads( + json_format.MessageToJson(pb_request, use_integers_for_enums=False) + ) + + # verify fields with default values are dropped + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_access_policy_bindings._get_unset_required_fields(jsonified_request) + jsonified_request.update(unset_fields) + + # verify required fields with default values are now present + + jsonified_request["name"] = "name_value" + + unset_fields = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ).search_access_policy_bindings._get_unset_required_fields(jsonified_request) + # Check that path parameters 
and body parameters are not mixing in. + assert not set(unset_fields) - set( + ( + "page_size", + "page_token", + ) + ) + jsonified_request.update(unset_fields) + + # verify required fields with non-default values are left alone + assert "name" in jsonified_request + assert jsonified_request["name"] == "name_value" + + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type(**request_init) + + # Designate an appropriate value for the returned response. + return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # We need to mock transcode() because providing default values + # for required fields will fail the real version if the http_options + # expect actual values for those fields. + with mock.patch.object(path_template, "transcode") as transcode: + # A uri without fields and an empty body will force all the + # request fields to show up in the query_params. 
+ pb_request = request_type.pb(request) + transcode_result = { + "uri": "v1/sample_method", + "method": "get", + "query_params": pb_request, + } + transcode.return_value = transcode_result + + response_value = Response() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = ( + access_policies_service.SearchAccessPolicyBindingsResponse.pb( + return_value + ) + ) + json_return_value = json_format.MessageToJson(return_value) + + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.search_access_policy_bindings(request) + + expected_params = [("$alt", "json;enum-encoding=int")] + actual_params = req.call_args.kwargs["params"] + assert expected_params == actual_params + + +def test_search_access_policy_bindings_rest_unset_required_fields(): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials + ) + + unset_fields = transport.search_access_policy_bindings._get_unset_required_fields( + {} + ) + assert set(unset_fields) == ( + set( + ( + "pageSize", + "pageToken", + ) + ) + & set(("name",)) + ) + + +def test_search_access_policy_bindings_rest_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + + # get arguments that satisfy an http rule for this method + sample_request = { + "name": "organizations/sample1/locations/sample2/accessPolicies/sample3" + } + + # get truthy value for each flattened field + mock_args = dict( + name="name_value", + ) + mock_args.update(sample_request) + + # Wrap the value into a proper Response obj + response_value = Response() + response_value.status_code = 200 + # Convert return value to protobuf type + return_value = access_policies_service.SearchAccessPolicyBindingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value._content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + client.search_access_policy_bindings(**mock_args) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(req.mock_calls) == 1 + _, args, _ = req.mock_calls[0] + assert path_template.validate( + "%s/v3beta/{name=organizations/*/locations/*/accessPolicies/*}:searchPolicyBindings" + % client.transport._host, + args[1], + ) + + +def test_search_access_policy_bindings_rest_flattened_error(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.search_access_policy_bindings( + access_policies_service.SearchAccessPolicyBindingsRequest(), + name="name_value", + ) + + +def test_search_access_policy_bindings_rest_pager(transport: str = "rest"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Mock the http request call within the method and fake a response. 
+ with mock.patch.object(Session, "request") as req: + # TODO(kbandes): remove this mock unless there's a good reason for it. + # with mock.patch.object(path_template, 'transcode') as transcode: + # Set the response as a series of pages + response = ( + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + next_page_token="abc", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[], + next_page_token="def", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + ], + next_page_token="ghi", + ), + access_policies_service.SearchAccessPolicyBindingsResponse( + policy_bindings=[ + policy_binding_resources.PolicyBinding(), + policy_binding_resources.PolicyBinding(), + ], + ), + ) + # Two responses for two calls + response = response + response + + # Wrap the values into proper Response objs + response = tuple( + access_policies_service.SearchAccessPolicyBindingsResponse.to_json(x) + for x in response + ) + return_values = tuple(Response() for i in response) + for return_val, response_val in zip(return_values, response): + return_val._content = response_val.encode("UTF-8") + return_val.status_code = 200 + req.side_effect = return_values + + sample_request = { + "name": "organizations/sample1/locations/sample2/accessPolicies/sample3" + } + + pager = client.search_access_policy_bindings(request=sample_request) + + results = list(pager) + assert len(results) == 6 + assert all( + isinstance(i, policy_binding_resources.PolicyBinding) for i in results + ) + + pages = list(client.search_access_policy_bindings(request=sample_request).pages) + for page_, token in zip(pages, ["abc", "def", "ghi", ""]): + assert page_.raw_page.next_page_token == token + + +def test_credentials_transport_error(): + # It is an 
error to provide credentials and a transport instance. + transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccessPoliciesClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccessPoliciesClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = AccessPoliciesClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = AccessPoliciesClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = AccessPoliciesClient(transport=transport) + assert client.transport is transport + + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.AccessPoliciesGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.AccessPoliciesGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + transports.AccessPoliciesRestTransport, + ], +) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + + +def test_transport_kind_grpc(): + transport = AccessPoliciesClient.get_transport_class("grpc")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "grpc" + + +def test_initialize_client_w_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_create_access_policy_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.create_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.CreateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_access_policy_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + call.return_value = access_policy_resources.AccessPolicy() + client.get_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.GetAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_access_policy_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.update_access_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.UpdateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_access_policy_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + call.return_value = operations_pb2.Operation(name="operations/op") + client.delete_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.DeleteAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_access_policies_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + call.return_value = access_policies_service.ListAccessPoliciesResponse() + client.list_access_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.ListAccessPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_search_access_policy_bindings_empty_call_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + call.return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + client.search_access_policy_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.SearchAccessPolicyBindingsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_grpc_asyncio(): + transport = AccessPoliciesAsyncClient.get_transport_class("grpc_asyncio")( + credentials=async_anonymous_credentials() + ) + assert transport.kind == "grpc_asyncio" + + +def test_initialize_client_w_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_create_access_policy_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.create_access_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.CreateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_get_access_policy_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policy_resources.AccessPolicy( + name="name_value", + uid="uid_value", + etag="etag_value", + display_name="display_name_value", + ) + ) + await client.get_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.GetAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_update_access_policy_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.update_access_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.UpdateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_delete_access_policy_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation(name="operations/spam") + ) + await client.delete_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.DeleteAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_list_access_policies_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.ListAccessPoliciesResponse( + next_page_token="next_page_token_value", + ) + ) + await client.list_access_policies(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.ListAccessPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +@pytest.mark.asyncio +async def test_search_access_policy_bindings_empty_call_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport="grpc_asyncio", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + access_policies_service.SearchAccessPolicyBindingsResponse( + next_page_token="next_page_token_value", + ) + ) + await client.search_access_policy_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.SearchAccessPolicyBindingsRequest() + + assert args[0] == request_msg + + +def test_transport_kind_rest(): + transport = AccessPoliciesClient.get_transport_class("rest")( + credentials=ga_credentials.AnonymousCredentials() + ) + assert transport.kind == "rest" + + +def test_create_access_policy_rest_bad_request( + request_type=access_policies_service.CreateAccessPolicyRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.create_access_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.CreateAccessPolicyRequest, + dict, + ], +) +def test_create_access_policy_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request_init["access_policy"] = { + "name": "name_value", + "uid": "uid_value", + "etag": "etag_value", + "display_name": "display_name_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "details": { + "rules": [ + { + "description": "description_value", + "effect": 1, + "principals": ["principals_value1", "principals_value2"], + "excluded_principals": [ + "excluded_principals_value1", + "excluded_principals_value2", + ], + "operation": { + "permissions": ["permissions_value1", "permissions_value2"], + "excluded_permissions": [ + "excluded_permissions_value1", + "excluded_permissions_value2", + ], + }, + "conditions": {}, + } + ] + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = access_policies_service.CreateAccessPolicyRequest.meta.fields[ + "access_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["access_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_policy"][field])): + del request_init["access_policy"][field][i][subfield] + else: + del request_init["access_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.create_access_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_create_access_policy_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "post_create_access_policy" + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_create_access_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "pre_create_access_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = access_policies_service.CreateAccessPolicyRequest.pb( + access_policies_service.CreateAccessPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = access_policies_service.CreateAccessPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.create_access_policy( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_access_policy_rest_bad_request( + request_type=access_policies_service.GetAccessPolicyRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/accessPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_access_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.GetAccessPolicyRequest, + dict, + ], +) +def test_get_access_policy_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/accessPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = access_policy_resources.AccessPolicy( + name="name_value", + uid="uid_value", + etag="etag_value", + display_name="display_name_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = access_policy_resources.AccessPolicy.pb(return_value) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.get_access_policy(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, access_policy_resources.AccessPolicy) + assert response.name == "name_value" + assert response.uid == "uid_value" + assert response.etag == "etag_value" + assert response.display_name == "display_name_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_get_access_policy_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "post_get_access_policy" + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_get_access_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "pre_get_access_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = 
access_policies_service.GetAccessPolicyRequest.pb( + access_policies_service.GetAccessPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = access_policy_resources.AccessPolicy.to_json( + access_policy_resources.AccessPolicy() + ) + req.return_value.content = return_value + + request = access_policies_service.GetAccessPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = access_policy_resources.AccessPolicy() + post_with_metadata.return_value = ( + access_policy_resources.AccessPolicy(), + metadata, + ) + + client.get_access_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_update_access_policy_rest_bad_request( + request_type=access_policies_service.UpdateAccessPolicyRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "access_policy": { + "name": "projects/sample1/locations/sample2/accessPolicies/sample3" + } + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.update_access_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.UpdateAccessPolicyRequest, + dict, + ], +) +def test_update_access_policy_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "access_policy": { + "name": "projects/sample1/locations/sample2/accessPolicies/sample3" + } + } + request_init["access_policy"] = { + "name": "projects/sample1/locations/sample2/accessPolicies/sample3", + "uid": "uid_value", + "etag": "etag_value", + "display_name": "display_name_value", + "annotations": {}, + "create_time": {"seconds": 751, "nanos": 543}, + "update_time": {}, + "details": { + "rules": [ + { + "description": "description_value", + "effect": 1, + "principals": ["principals_value1", "principals_value2"], + "excluded_principals": [ + "excluded_principals_value1", + "excluded_principals_value2", + ], + "operation": { + "permissions": ["permissions_value1", "permissions_value2"], + "excluded_permissions": [ + "excluded_permissions_value1", + "excluded_permissions_value2", + ], + }, + "conditions": {}, + } + ] + }, + } + # The version of a generated dependency at test runtime may differ from the version used during generation. 
+ # Delete any fields which are not present in the current runtime dependency + # See https://github.com/googleapis/gapic-generator-python/issues/1748 + + # Determine if the message type is proto-plus or protobuf + test_field = access_policies_service.UpdateAccessPolicyRequest.meta.fields[ + "access_policy" + ] + + def get_message_fields(field): + # Given a field which is a message (composite type), return a list with + # all the fields of the message. + # If the field is not a composite type, return an empty list. + message_fields = [] + + if hasattr(field, "message") and field.message: + is_field_type_proto_plus_type = not hasattr(field.message, "DESCRIPTOR") + + if is_field_type_proto_plus_type: + message_fields = field.message.meta.fields.values() + # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types + else: # pragma: NO COVER + message_fields = field.message.DESCRIPTOR.fields + return message_fields + + runtime_nested_fields = [ + (field.name, nested_field.name) + for field in get_message_fields(test_field) + for nested_field in get_message_fields(field) + ] + + subfields_not_in_runtime = [] + + # For each item in the sample request, create a list of sub fields which are not present at runtime + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for field, value in request_init["access_policy"].items(): # pragma: NO COVER + result = None + is_repeated = False + # For repeated fields + if isinstance(value, list) and len(value): + is_repeated = True + result = value[0] + # For fields where the type is another message + if isinstance(value, dict): + result = value + + if result and hasattr(result, "keys"): + for subfield in result.keys(): + if (field, subfield) not in runtime_nested_fields: + subfields_not_in_runtime.append( + { + "field": field, + "subfield": subfield, + "is_repeated": is_repeated, + } + ) + + # Remove fields from the sample request which are not present in the runtime 
version of the dependency + # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + field = subfield_to_delete.get("field") + field_repeated = subfield_to_delete.get("is_repeated") + subfield = subfield_to_delete.get("subfield") + if subfield: + if field_repeated: + for i in range(0, len(request_init["access_policy"][field])): + del request_init["access_policy"][field][i][subfield] + else: + del request_init["access_policy"][field][subfield] + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.update_access_policy(request) + + # Establish that the response is the type that we expect. 
+ json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_update_access_policy_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "post_update_access_policy" + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_update_access_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "pre_update_access_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = access_policies_service.UpdateAccessPolicyRequest.pb( + access_policies_service.UpdateAccessPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = access_policies_service.UpdateAccessPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.update_access_policy( + request, + metadata=[ + ("key", "val"), + 
("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_delete_access_policy_rest_bad_request( + request_type=access_policies_service.DeleteAccessPolicyRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/accessPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.delete_access_policy(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.DeleteAccessPolicyRequest, + dict, + ], +) +def test_delete_access_policy_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"name": "projects/sample1/locations/sample2/accessPolicies/sample3"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = operations_pb2.Operation(name="operations/spam") + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.delete_access_policy(request) + + # Establish that the response is the type that we expect. + json_return_value = json_format.MessageToJson(return_value) + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_delete_access_policy_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object(operation.Operation, "_set_result_from_operation"), + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "post_delete_access_policy" + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_delete_access_policy_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "pre_delete_access_policy" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = access_policies_service.DeleteAccessPolicyRequest.pb( + access_policies_service.DeleteAccessPolicyRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = 
{"header-1": "value-1", "header-2": "value-2"} + return_value = json_format.MessageToJson(operations_pb2.Operation()) + req.return_value.content = return_value + + request = access_policies_service.DeleteAccessPolicyRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = operations_pb2.Operation() + post_with_metadata.return_value = operations_pb2.Operation(), metadata + + client.delete_access_policy( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_list_access_policies_rest_bad_request( + request_type=access_policies_service.ListAccessPoliciesRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.list_access_policies(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.ListAccessPoliciesRequest, + dict, + ], +) +def test_list_access_policies_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = {"parent": "projects/sample1/locations/sample2"} + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. + return_value = access_policies_service.ListAccessPoliciesResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = access_policies_service.ListAccessPoliciesResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.list_access_policies(request) + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListAccessPoliciesPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_list_access_policies_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "post_list_access_policies" + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_list_access_policies_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, "pre_list_access_policies" + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = access_policies_service.ListAccessPoliciesRequest.pb( + access_policies_service.ListAccessPoliciesRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = access_policies_service.ListAccessPoliciesResponse.to_json( + access_policies_service.ListAccessPoliciesResponse() + ) + req.return_value.content = return_value + + request = access_policies_service.ListAccessPoliciesRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = access_policies_service.ListAccessPoliciesResponse() + post_with_metadata.return_value = ( + 
access_policies_service.ListAccessPoliciesResponse(), + metadata, + ) + + client.list_access_policies( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_search_access_policy_bindings_rest_bad_request( + request_type=access_policies_service.SearchAccessPolicyBindingsRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/accessPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a BadRequest error. + with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = mock.Mock() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = mock.Mock() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.search_access_policy_bindings(request) + + +@pytest.mark.parametrize( + "request_type", + [ + access_policies_service.SearchAccessPolicyBindingsRequest, + dict, + ], +) +def test_search_access_policy_bindings_rest_call_success(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + # send a request that will satisfy transcoding + request_init = { + "name": "organizations/sample1/locations/sample2/accessPolicies/sample3" + } + request = request_type(**request_init) + + # Mock the http request call within the method and fake a response. + with mock.patch.object(type(client.transport._session), "request") as req: + # Designate an appropriate value for the returned response. 
+ return_value = access_policies_service.SearchAccessPolicyBindingsResponse( + next_page_token="next_page_token_value", + ) + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + + # Convert return value to protobuf type + return_value = access_policies_service.SearchAccessPolicyBindingsResponse.pb( + return_value + ) + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + response = client.search_access_policy_bindings(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.SearchAccessPolicyBindingsPager) + assert response.next_page_token == "next_page_token_value" + + +@pytest.mark.parametrize("null_interceptor", [True, False]) +def test_search_access_policy_bindings_rest_interceptors(null_interceptor): + transport = transports.AccessPoliciesRestTransport( + credentials=ga_credentials.AnonymousCredentials(), + interceptor=None + if null_interceptor + else transports.AccessPoliciesRestInterceptor(), + ) + client = AccessPoliciesClient(transport=transport) + + with ( + mock.patch.object(type(client.transport._session), "request") as req, + mock.patch.object(path_template, "transcode") as transcode, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_search_access_policy_bindings", + ) as post, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "post_search_access_policy_bindings_with_metadata", + ) as post_with_metadata, + mock.patch.object( + transports.AccessPoliciesRestInterceptor, + "pre_search_access_policy_bindings", + ) as pre, + ): + pre.assert_not_called() + post.assert_not_called() + post_with_metadata.assert_not_called() + pb_message = access_policies_service.SearchAccessPolicyBindingsRequest.pb( + 
access_policies_service.SearchAccessPolicyBindingsRequest() + ) + transcode.return_value = { + "method": "post", + "uri": "my_uri", + "body": pb_message, + "query_params": pb_message, + } + + req.return_value = mock.Mock() + req.return_value.status_code = 200 + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + return_value = ( + access_policies_service.SearchAccessPolicyBindingsResponse.to_json( + access_policies_service.SearchAccessPolicyBindingsResponse() + ) + ) + req.return_value.content = return_value + + request = access_policies_service.SearchAccessPolicyBindingsRequest() + metadata = [ + ("key", "val"), + ("cephalopod", "squid"), + ] + pre.return_value = request, metadata + post.return_value = access_policies_service.SearchAccessPolicyBindingsResponse() + post_with_metadata.return_value = ( + access_policies_service.SearchAccessPolicyBindingsResponse(), + metadata, + ) + + client.search_access_policy_bindings( + request, + metadata=[ + ("key", "val"), + ("cephalopod", "squid"), + ], + ) + + pre.assert_called_once() + post.assert_called_once() + post_with_metadata.assert_called_once() + + +def test_get_operation_rest_bad_request( + request_type=operations_pb2.GetOperationRequest, +): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = request_type() + request = json_format.ParseDict( + {"name": "projects/sample1/locations/sample2/operations/sample3"}, request + ) + + # Mock the http request call within the method and fake a BadRequest error. 
+ with ( + mock.patch.object(Session, "request") as req, + pytest.raises(core_exceptions.BadRequest), + ): + # Wrap the value into a proper Response obj + response_value = Response() + json_return_value = "" + response_value.json = mock.Mock(return_value={}) + response_value.status_code = 400 + response_value.request = Request() + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + client.get_operation(request) + + +@pytest.mark.parametrize( + "request_type", + [ + operations_pb2.GetOperationRequest, + dict, + ], +) +def test_get_operation_rest(request_type): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + request_init = {"name": "projects/sample1/locations/sample2/operations/sample3"} + request = request_type(**request_init) + # Mock the http request call within the method and fake a response. + with mock.patch.object(Session, "request") as req: + # Designate an appropriate value for the returned response. + return_value = operations_pb2.Operation() + + # Wrap the value into a proper Response obj + response_value = mock.Mock() + response_value.status_code = 200 + json_return_value = json_format.MessageToJson(return_value) + response_value.content = json_return_value.encode("UTF-8") + + req.return_value = response_value + req.return_value.headers = {"header-1": "value-1", "header-2": "value-2"} + + response = client.get_operation(request) + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_initialize_client_w_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + assert client is not None + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. 
+def test_create_access_policy_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.create_access_policy), "__call__" + ) as call: + client.create_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.CreateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_get_access_policy_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.get_access_policy), "__call__" + ) as call: + client.get_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.GetAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_update_access_policy_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.update_access_policy), "__call__" + ) as call: + client.update_access_policy(request=None) + + # Establish that the underlying stub method was called. 
+ call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.UpdateAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_delete_access_policy_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.delete_access_policy), "__call__" + ) as call: + client.delete_access_policy(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.DeleteAccessPolicyRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_list_access_policies_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. + with mock.patch.object( + type(client.transport.list_access_policies), "__call__" + ) as call: + client.list_access_policies(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.ListAccessPoliciesRequest() + + assert args[0] == request_msg + + +# This test is a coverage failsafe to make sure that totally empty calls, +# i.e. request == None and no flattened fields passed, work. +def test_search_access_policy_bindings_empty_call_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + + # Mock the actual call, and fake the request. 
+ with mock.patch.object( + type(client.transport.search_access_policy_bindings), "__call__" + ) as call: + client.search_access_policy_bindings(request=None) + + # Establish that the underlying stub method was called. + call.assert_called() + _, args, _ = call.mock_calls[0] + request_msg = access_policies_service.SearchAccessPolicyBindingsRequest() + + assert args[0] == request_msg + + +def test_access_policies_rest_lro_client(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + transport = client.transport + + # Ensure that we have an api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.AbstractOperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.AccessPoliciesGrpcTransport, + ) + + +def test_access_policies_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.AccessPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json", + ) + + +def test_access_policies_base_transport(): + # Instantiate the base transport. + with mock.patch( + "google.cloud.iam_v3beta.services.access_policies.transports.AccessPoliciesTransport.__init__" + ) as Transport: + Transport.return_value = None + transport = transports.AccessPoliciesTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + "create_access_policy", + "get_access_policy", + "update_access_policy", + "delete_access_policy", + "list_access_policies", + "search_access_policy_bindings", + "get_operation", + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Additionally, the LRO client (a property) should + # also raise NotImplementedError + with pytest.raises(NotImplementedError): + transport.operations_client + + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_access_policies_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with ( + mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, + mock.patch( + "google.cloud.iam_v3beta.services.access_policies.transports.AccessPoliciesTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccessPoliciesTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with( + "credentials.json", + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +def test_access_policies_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch( + "google.cloud.iam_v3beta.services.access_policies.transports.AccessPoliciesTransport._prep_wrapped_messages" + ) as Transport, + ): + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.AccessPoliciesTransport() + adc.assert_called_once() + + +def test_access_policies_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + AccessPoliciesClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + ], +) +def test_access_policies_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + transports.AccessPoliciesRestTransport, + ], +) +def test_access_policies_transport_auth_gdch_credentials(transport_class): + host = "https://language.com" + api_audience_tests = [None, "https://language2.com"] + api_audience_expect = [host, "https://language2.com"] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, "default", autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock( + return_value=gdch_mock + ) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with(e) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.AccessPoliciesGrpcTransport, grpc_helpers), + (transports.AccessPoliciesGrpcAsyncIOTransport, grpc_helpers_async), + ], +) +def test_access_policies_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with ( + mock.patch.object(google.auth, "default", autospec=True) as adc, + mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel, + ): + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + + create_channel.assert_called_with( + "iam.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=["1", "2"], + default_host="iam.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + ], +) +def test_access_policies_grpc_transport_client_cert_source_for_mtls(transport_class): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds, + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback, + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, private_key=expected_key + ) + + +def test_access_policies_http_transport_client_cert_source_for_mtls(): + cred = ga_credentials.AnonymousCredentials() + with mock.patch( + "google.auth.transport.requests.AuthorizedSession.configure_mtls_channel" + ) as mock_configure_mtls_channel: + transports.AccessPoliciesRestTransport( + credentials=cred, client_cert_source_for_mtls=client_cert_source_callback + ) + mock_configure_mtls_channel.assert_called_once_with(client_cert_source_callback) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_access_policies_host_no_port(transport_name): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint="iam.googleapis.com"), + transport=transport_name, + ) + assert client.transport._host == ( + "iam.googleapis.com:443" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iam.googleapis.com" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + "rest", + ], +) +def test_access_policies_host_with_port(transport_name): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions( + api_endpoint="iam.googleapis.com:8000" + ), + transport=transport_name, + ) + assert client.transport._host == ( + "iam.googleapis.com:8000" + if transport_name in ["grpc", "grpc_asyncio"] + else "https://iam.googleapis.com:8000" + ) + + +@pytest.mark.parametrize( + "transport_name", + [ + "rest", + ], +) +def 
test_access_policies_client_transport_session_collision(transport_name): + creds1 = ga_credentials.AnonymousCredentials() + creds2 = ga_credentials.AnonymousCredentials() + client1 = AccessPoliciesClient( + credentials=creds1, + transport=transport_name, + ) + client2 = AccessPoliciesClient( + credentials=creds2, + transport=transport_name, + ) + session1 = client1.transport.create_access_policy._session + session2 = client2.transport.create_access_policy._session + assert session1 != session2 + session1 = client1.transport.get_access_policy._session + session2 = client2.transport.get_access_policy._session + assert session1 != session2 + session1 = client1.transport.update_access_policy._session + session2 = client2.transport.update_access_policy._session + assert session1 != session2 + session1 = client1.transport.delete_access_policy._session + session2 = client2.transport.delete_access_policy._session + assert session1 != session2 + session1 = client1.transport.list_access_policies._session + session2 = client2.transport.list_access_policies._session + assert session1 != session2 + session1 = client1.transport.search_access_policy_bindings._session + session2 = client2.transport.search_access_policy_bindings._session + assert session1 != session2 + + +def test_access_policies_grpc_transport_channel(): + channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.AccessPoliciesGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_access_policies_grpc_asyncio_transport_channel(): + channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.AccessPoliciesGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.filterwarnings("ignore::FutureWarning") +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + ], +) +def test_access_policies_transport_channel_mtls_with_client_cert_source( + transport_class, +): + with mock.patch( + "grpc.ssl_channel_credentials", autospec=True + ) as grpc_ssl_channel_cred: + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, "default") as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when 
deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize( + "transport_class", + [ + transports.AccessPoliciesGrpcTransport, + transports.AccessPoliciesGrpcAsyncIOTransport, + ], +) +def test_access_policies_transport_channel_mtls_with_adc(transport_class): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object( + transport_class, "create_channel" + ) as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_access_policies_grpc_lro_client(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. 
+ assert transport.operations_client is transport.operations_client + + +def test_access_policies_grpc_lro_async_client(): + client = AccessPoliciesAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + transport = client.transport + + # Ensure that we have a api-core operations client. + assert isinstance( + transport.operations_client, + operations_v1.OperationsAsyncClient, + ) + + # Ensure that subsequent calls to the property send the exact same object. + assert transport.operations_client is transport.operations_client + + +def test_access_policy_path(): + organization = "squid" + location = "clam" + access_policy = "whelk" + expected = "organizations/{organization}/locations/{location}/accessPolicies/{access_policy}".format( + organization=organization, + location=location, + access_policy=access_policy, + ) + actual = AccessPoliciesClient.access_policy_path( + organization, location, access_policy + ) + assert expected == actual + + +def test_parse_access_policy_path(): + expected = { + "organization": "octopus", + "location": "oyster", + "access_policy": "nudibranch", + } + path = AccessPoliciesClient.access_policy_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccessPoliciesClient.parse_access_policy_path(path) + assert expected == actual + + +def test_policy_binding_path(): + organization = "cuttlefish" + location = "mussel" + policy_binding = "winkle" + expected = "organizations/{organization}/locations/{location}/policyBindings/{policy_binding}".format( + organization=organization, + location=location, + policy_binding=policy_binding, + ) + actual = AccessPoliciesClient.policy_binding_path( + organization, location, policy_binding + ) + assert expected == actual + + +def test_parse_policy_binding_path(): + expected = { + "organization": "nautilus", + "location": "scallop", + "policy_binding": "abalone", + } + path = AccessPoliciesClient.policy_binding_path(**expected) + + # Check that the path construction is reversible. + actual = AccessPoliciesClient.parse_policy_binding_path(path) + assert expected == actual + + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format( + billing_account=billing_account, + ) + actual = AccessPoliciesClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = AccessPoliciesClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = AccessPoliciesClient.parse_common_billing_account_path(path) + assert expected == actual + + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format( + folder=folder, + ) + actual = AccessPoliciesClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = AccessPoliciesClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccessPoliciesClient.parse_common_folder_path(path) + assert expected == actual + + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format( + organization=organization, + ) + actual = AccessPoliciesClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = AccessPoliciesClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = AccessPoliciesClient.parse_common_organization_path(path) + assert expected == actual + + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format( + project=project, + ) + actual = AccessPoliciesClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = AccessPoliciesClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = AccessPoliciesClient.parse_common_project_path(path) + assert expected == actual + + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format( + project=project, + location=location, + ) + actual = AccessPoliciesClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = AccessPoliciesClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = AccessPoliciesClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object( + transports.AccessPoliciesTransport, "_prep_wrapped_messages" + ) as prep: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object( + transports.AccessPoliciesTransport, "_prep_wrapped_messages" + ) as prep: + transport_class = AccessPoliciesClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + +def test_get_operation(transport: str = "grpc"): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + response = client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, operations_pb2.Operation) + + +@pytest.mark.asyncio +async def test_get_operation_async(transport: str = "grpc_asyncio"): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = operations_pb2.GetOperationRequest() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the response is the type that we expect. + assert isinstance(response, operations_pb2.Operation) + + +def test_get_operation_field_headers(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = operations_pb2.Operation() + + client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +@pytest.mark.asyncio +async def test_get_operation_field_headers_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = operations_pb2.GetOperationRequest() + request.name = "locations" + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation(request) + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + "x-goog-request-params", + "name=locations", + ) in kw["metadata"] + + +def test_get_operation_from_dict(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + response = client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +@pytest.mark.asyncio +async def test_get_operation_from_dict_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. 
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + response = await client.get_operation( + request={ + "name": "locations", + } + ) + call.assert_called() + + +def test_get_operation_flattened(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = operations_pb2.Operation() + + client.get_operation() + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + +@pytest.mark.asyncio +async def test_get_operation_flattened_async(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), + ) + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object(type(client.transport.get_operation), "__call__") as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( + operations_pb2.Operation() + ) + await client.get_operation() + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == operations_pb2.GetOperationRequest() + + +def test_transport_close_grpc(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="grpc" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +@pytest.mark.asyncio +async def test_transport_close_grpc_asyncio(): + client = AccessPoliciesAsyncClient( + credentials=async_anonymous_credentials(), transport="grpc_asyncio" + ) + with mock.patch.object( + type(getattr(client.transport, "_grpc_channel")), "close" + ) as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close_rest(): + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + with mock.patch.object( + type(getattr(client.transport, "_session")), "close" + ) as close: + with client: + close.assert_not_called() + close.assert_called_once() + + +def test_client_ctx(): + transports = [ + "rest", + "grpc", + ] + for transport in transports: + client = AccessPoliciesClient( + credentials=ga_credentials.AnonymousCredentials(), transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (AccessPoliciesClient, transports.AccessPoliciesGrpcTransport), + (AccessPoliciesAsyncClient, transports.AccessPoliciesGrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client._DEFAULT_ENDPOINT_TEMPLATE.format( + UNIVERSE_DOMAIN=client._DEFAULT_UNIVERSE + ), + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) diff --git a/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_policy_bindings.py b/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_policy_bindings.py index cf0ddea97119..b0f1112b889c 100644 --- a/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_policy_bindings.py +++ b/packages/google-cloud-iam/tests/unit/gapic/iam_v3beta/test_policy_bindings.py @@ -3375,6 +3375,7 @@ def test_search_target_policy_bindings_non_empty_request_with_auto_populated_fie target="target_value", page_token="page_token_value", parent="parent_value", + filter="filter_value", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -3391,6 +3392,7 @@ def test_search_target_policy_bindings_non_empty_request_with_auto_populated_fie target="target_value", page_token="page_token_value", parent="parent_value", + filter="filter_value", ) @@ -5035,6 +5037,7 @@ def test_search_target_policy_bindings_rest_required_fields( # Check that path parameters and body parameters are not mixing in. assert not set(unset_fields) - set( ( + "filter", "page_size", "page_token", "target", @@ -5111,6 +5114,7 @@ def test_search_target_policy_bindings_rest_unset_required_fields(): assert set(unset_fields) == ( set( ( + "filter", "pageSize", "pageToken", "target", @@ -5738,7 +5742,10 @@ def test_create_policy_binding_rest_call_success(request_type): "etag": "etag_value", "display_name": "display_name_value", "annotations": {}, - "target": {"principal_set": "principal_set_value"}, + "target": { + "principal_set": "principal_set_value", + "resource": "resource_value", + }, "policy_kind": 1, "policy": "policy_value", "policy_uid": "policy_uid_value", @@ -6111,7 +6118,10 @@ def test_update_policy_binding_rest_call_success(request_type): "etag": "etag_value", "display_name": "display_name_value", "annotations": {}, - "target": {"principal_set": "principal_set_value"}, + "target": { + "principal_set": "principal_set_value", + "resource": "resource_value", + }, "policy_kind": 1, "policy": "policy_value", "policy_uid": "policy_uid_value", diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/hsm_management.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/hsm_management.py index 0c926bb42141..644ff4d7eba4 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/hsm_management.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/hsm_management.py @@ -83,7 +83,7 @@ class SingleTenantHsmInstance(proto.Message): Output only. The system-defined duration that an instance can remain unrefreshed until it is automatically disabled. This will have a value - of 120 days. + of 730 days. 
disable_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time at which the instance will be automatically disabled if not refreshed. This field is @@ -94,6 +94,13 @@ class SingleTenantHsmInstance(proto.Message): before this time otherwise the [SingleTenantHsmInstance][google.cloud.kms.v1.SingleTenantHsmInstance] will become disabled. + key_portability_enabled (bool): + Optional. Immutable. Indicates whether key portability is + enabled for the + [SingleTenantHsmInstance][google.cloud.kms.v1.SingleTenantHsmInstance]. + This can only be set at creation time. Key portability + features are disabled by default and not yet available in + GA. """ class State(proto.Enum): @@ -228,6 +235,10 @@ class QuorumAuth(proto.Message): number=7, message=timestamp_pb2.Timestamp, ) + key_portability_enabled: bool = proto.Field( + proto.BOOL, + number=8, + ) class SingleTenantHsmInstanceProposal(proto.Message): diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py index 074f66bcbc28..6a56330c52d1 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/resources.py @@ -319,18 +319,19 @@ class CryptoKey(proto.Message): [ProtectionLevels][google.cloud.kms.v1.ProtectionLevel] in the future. key_access_justifications_policy (google.cloud.kms_v1.types.KeyAccessJustificationsPolicy): - Optional. The policy used for Key Access - Justifications Policy Enforcement. If this field - is present and this key is enrolled in Key - Access Justifications Policy Enforcement, the - policy will be evaluated in encrypt, decrypt, - and sign operations, and the operation will fail - if rejected by the policy. The policy is defined - by specifying zero or more allowed justification - codes. + Optional. The policy used for Key Access Justifications + Policy Enforcement. 
If this field is present and this key is + enrolled in Key Access Justifications Policy Enforcement, + the policy will be evaluated in encrypt, decrypt, and sign + operations, and the operation will fail if rejected by the + policy. The policy is defined by specifying zero or more + allowed justification codes. https://cloud.google.com/assured-workloads/key-access-justifications/docs/justification-codes - By default, this field is absent, and all - justification codes are allowed. + By default, this field is absent, and all justification + codes are allowed. If the + ``key_access_justifications_policy.allowed_access_reasons`` + is empty (zero allowed justification code), all encrypt, + decrypt, and sign operations will fail. """ class CryptoKeyPurpose(proto.Enum): @@ -1573,16 +1574,20 @@ class KeyAccessJustificationsPolicy(proto.Message): specifies zero or more allowed [AccessReason][google.cloud.kms.v1.AccessReason] values for encrypt, decrypt, and sign operations on a - [CryptoKey][google.cloud.kms.v1.CryptoKey]. + [CryptoKey][google.cloud.kms.v1.CryptoKey] or + [KeyAccessJustificationsPolicyConfig][google.cloud.kms.v1.KeyAccessJustificationsPolicyConfig] + (the default Key Access Justifications policy). Attributes: allowed_access_reasons (MutableSequence[google.cloud.kms_v1.types.AccessReason]): The list of allowed reasons for access to a - [CryptoKey][google.cloud.kms.v1.CryptoKey]. Zero allowed - access reasons means all encrypt, decrypt, and sign - operations for the - [CryptoKey][google.cloud.kms.v1.CryptoKey] associated with - this policy will fail. + [CryptoKey][google.cloud.kms.v1.CryptoKey]. Note that empty + allowed_access_reasons has a different meaning depending on + where this message appears. If this is under + [KeyAccessJustificationsPolicyConfig][google.cloud.kms.v1.KeyAccessJustificationsPolicyConfig], + it means allow-all. If this is under + [CryptoKey][google.cloud.kms.v1.CryptoKey], it means + deny-all. 
""" allowed_access_reasons: MutableSequence["AccessReason"] = proto.RepeatedField( diff --git a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py index 968a427d8d7e..ee5d43db6ed2 100644 --- a/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py +++ b/packages/google-cloud-kms/google/cloud/kms_v1/types/service.py @@ -2784,6 +2784,15 @@ class Digest(proto.Message): A message digest produced with the SHA-512 algorithm. + This field is a member of `oneof`_ ``digest``. + external_mu (bytes): + A message digest produced with SHAKE-256, to + be used with ML-DSA external-μ algorithms only. + See "message representative" note in section + 6.2, algorithm 7 of the FIPS-204 standard: + + https://doi.org/10.6028/nist.fips.204 + This field is a member of `oneof`_ ``digest``. """ @@ -2802,6 +2811,11 @@ class Digest(proto.Message): number=3, oneof="digest", ) + external_mu: bytes = proto.Field( + proto.BYTES, + number=4, + oneof="digest", + ) class LocationMetadata(proto.Message): diff --git a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_hsm_management.py b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_hsm_management.py index ef3575c4eb1c..a5c6590c3a2f 100644 --- a/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_hsm_management.py +++ b/packages/google-cloud-kms/tests/unit/gapic/kms_v1/test_hsm_management.py @@ -1911,6 +1911,7 @@ def test_get_single_tenant_hsm_instance(request_type, transport: str = "grpc"): call.return_value = hsm_management.SingleTenantHsmInstance( name="name_value", state=hsm_management.SingleTenantHsmInstance.State.CREATING, + key_portability_enabled=True, ) response = client.get_single_tenant_hsm_instance(request) @@ -1924,6 +1925,7 @@ def test_get_single_tenant_hsm_instance(request_type, transport: str = "grpc"): assert isinstance(response, hsm_management.SingleTenantHsmInstance) assert response.name == "name_value" assert response.state == 
hsm_management.SingleTenantHsmInstance.State.CREATING + assert response.key_portability_enabled is True def test_get_single_tenant_hsm_instance_non_empty_request_with_auto_populated_field(): @@ -2061,6 +2063,7 @@ async def test_get_single_tenant_hsm_instance_async( hsm_management.SingleTenantHsmInstance( name="name_value", state=hsm_management.SingleTenantHsmInstance.State.CREATING, + key_portability_enabled=True, ) ) response = await client.get_single_tenant_hsm_instance(request) @@ -2075,6 +2078,7 @@ async def test_get_single_tenant_hsm_instance_async( assert isinstance(response, hsm_management.SingleTenantHsmInstance) assert response.name == "name_value" assert response.state == hsm_management.SingleTenantHsmInstance.State.CREATING + assert response.key_portability_enabled is True @pytest.mark.asyncio @@ -7347,6 +7351,7 @@ async def test_get_single_tenant_hsm_instance_empty_call_grpc_asyncio(): hsm_management.SingleTenantHsmInstance( name="name_value", state=hsm_management.SingleTenantHsmInstance.State.CREATING, + key_portability_enabled=True, ) ) await client.get_single_tenant_hsm_instance(request=None) @@ -7754,6 +7759,7 @@ def test_get_single_tenant_hsm_instance_rest_call_success(request_type): return_value = hsm_management.SingleTenantHsmInstance( name="name_value", state=hsm_management.SingleTenantHsmInstance.State.CREATING, + key_portability_enabled=True, ) # Wrap the value into a proper Response obj @@ -7772,6 +7778,7 @@ def test_get_single_tenant_hsm_instance_rest_call_success(request_type): assert isinstance(response, hsm_management.SingleTenantHsmInstance) assert response.name == "name_value" assert response.state == hsm_management.SingleTenantHsmInstance.State.CREATING + assert response.key_portability_enabled is True @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -7901,6 +7908,7 @@ def test_create_single_tenant_hsm_instance_rest_call_success(request_type): "delete_time": {}, "unrefreshed_duration_until_disable": {"seconds": 751, 
"nanos": 543}, "disable_time": {}, + "key_portability_enabled": True, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/__init__.py index 7569e6d78783..a3309568682f 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/__init__.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/__init__.py @@ -50,6 +50,7 @@ ListTemplatesResponse, MaliciousUriFilterResult, MaliciousUriFilterSettings, + McpServerFloorSetting, MessageItem, MultiLanguageDetectionMetadata, PiAndJailbreakFilterResult, @@ -71,6 +72,7 @@ SdpFinding, SdpFindingLikelihood, SdpInspectResult, + StreamingMode, Template, UpdateFloorSettingRequest, UpdateTemplateRequest, @@ -193,6 +195,7 @@ def _get_version(dependency_name): "ListTemplatesResponse", "MaliciousUriFilterResult", "MaliciousUriFilterSettings", + "McpServerFloorSetting", "MessageItem", "ModelArmorClient", "MultiLanguageDetectionMetadata", @@ -215,6 +218,7 @@ def _get_version(dependency_name): "SdpFinding", "SdpFindingLikelihood", "SdpInspectResult", + "StreamingMode", "Template", "UpdateFloorSettingRequest", "UpdateTemplateRequest", diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/gapic_metadata.json b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/gapic_metadata.json index 5bea4566b606..0d876aebbb2b 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/gapic_metadata.json +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/gapic_metadata.json @@ -45,6 +45,16 @@ "sanitize_user_prompt" ] }, + "StreamSanitizeModelResponse": { + "methods": [ + "stream_sanitize_model_response" + ] + }, + "StreamSanitizeUserPrompt": { + "methods": [ + 
"stream_sanitize_user_prompt" + ] + }, "UpdateFloorSetting": { "methods": [ "update_floor_setting" @@ -95,6 +105,16 @@ "sanitize_user_prompt" ] }, + "StreamSanitizeModelResponse": { + "methods": [ + "stream_sanitize_model_response" + ] + }, + "StreamSanitizeUserPrompt": { + "methods": [ + "stream_sanitize_user_prompt" + ] + }, "UpdateFloorSetting": { "methods": [ "update_floor_setting" @@ -145,6 +165,16 @@ "sanitize_user_prompt" ] }, + "StreamSanitizeModelResponse": { + "methods": [ + "stream_sanitize_model_response" + ] + }, + "StreamSanitizeUserPrompt": { + "methods": [ + "stream_sanitize_user_prompt" + ] + }, "UpdateFloorSetting": { "methods": [ "update_floor_setting" diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/async_client.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/async_client.py index 576de808f241..e5f089e3d522 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/async_client.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/async_client.py @@ -17,6 +17,9 @@ import re from collections import OrderedDict from typing import ( + AsyncIterable, + AsyncIterator, + Awaitable, Callable, Dict, Mapping, @@ -1280,6 +1283,180 @@ async def sample_sanitize_model_response(): # Done; return the response. return response + def stream_sanitize_user_prompt( + self, + requests: Optional[AsyncIterator[service.SanitizeUserPromptRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[service.SanitizeUserPromptResponse]]: + r"""Streaming version of Sanitize User Prompt. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1beta + + async def sample_stream_sanitize_user_prompt(): + # Create a client + client = modelarmor_v1beta.ModelArmorAsyncClient() + + # Initialize request argument(s) + user_prompt_data = modelarmor_v1beta.DataItem() + user_prompt_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeUserPromptRequest( + name="name_value", + user_prompt_data=user_prompt_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeUserPromptRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_sanitize_user_prompt(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.modelarmor_v1beta.types.SanitizeUserPromptRequest`]): + The request object AsyncIterator. Sanitize User Prompt request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptResponse]: + Sanitized User Prompt Response. 
+ """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.stream_sanitize_user_prompt + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stream_sanitize_model_response( + self, + requests: Optional[AsyncIterator[service.SanitizeModelResponseRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Awaitable[AsyncIterable[service.SanitizeModelResponseResponse]]: + r"""Streaming version of Sanitizes Model Response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1beta + + async def sample_stream_sanitize_model_response(): + # Create a client + client = modelarmor_v1beta.ModelArmorAsyncClient() + + # Initialize request argument(s) + model_response_data = modelarmor_v1beta.DataItem() + model_response_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeModelResponseRequest( + name="name_value", + model_response_data=model_response_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeModelResponseRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_sanitize_model_response(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + Args: + requests (AsyncIterator[`google.cloud.modelarmor_v1beta.types.SanitizeModelResponseRequest`]): + The request object AsyncIterator. Sanitize Model Response request. + retry (google.api_core.retry_async.AsyncRetry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + AsyncIterable[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseResponse]: + Sanitized Model Response Response. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._client._transport._wrapped_methods[ + self._client._transport.stream_sanitize_model_response + ] + + # Validate the universe domain. + self._client._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + async def get_location( self, request: Optional[Union[locations_pb2.GetLocationRequest, dict]] = None, diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/client.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/client.py index 425fde2d03fd..99d85586d930 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/client.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/client.py @@ -23,6 +23,8 @@ from typing import ( Callable, Dict, + Iterable, + Iterator, Mapping, MutableMapping, MutableSequence, @@ -1710,6 +1712,180 @@ def sample_sanitize_model_response(): # Done; return the response. return response + def stream_sanitize_user_prompt( + self, + requests: Optional[Iterator[service.SanitizeUserPromptRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[service.SanitizeUserPromptResponse]: + r"""Streaming version of Sanitize User Prompt. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. 
+ # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1beta + + def sample_stream_sanitize_user_prompt(): + # Create a client + client = modelarmor_v1beta.ModelArmorClient() + + # Initialize request argument(s) + user_prompt_data = modelarmor_v1beta.DataItem() + user_prompt_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeUserPromptRequest( + name="name_value", + user_prompt_data=user_prompt_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeUserPromptRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_sanitize_user_prompt(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptRequest]): + The request object iterator. Sanitize User Prompt request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptResponse]: + Sanitized User Prompt Response. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.stream_sanitize_user_prompt + ] + + # Validate the universe domain. 
+ self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def stream_sanitize_model_response( + self, + requests: Optional[Iterator[service.SanitizeModelResponseRequest]] = None, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> Iterable[service.SanitizeModelResponseResponse]: + r"""Streaming version of Sanitizes Model Response. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud import modelarmor_v1beta + + def sample_stream_sanitize_model_response(): + # Create a client + client = modelarmor_v1beta.ModelArmorClient() + + # Initialize request argument(s) + model_response_data = modelarmor_v1beta.DataItem() + model_response_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeModelResponseRequest( + name="name_value", + model_response_data=model_response_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeModelResponseRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_sanitize_model_response(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + Args: + requests (Iterator[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseRequest]): + The request object iterator. Sanitize Model Response request. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, Union[str, bytes]]]): Key/value pairs which should be + sent along with the request as metadata. Normally, each value must be of type `str`, + but for metadata keys ending with the suffix `-bin`, the corresponding values must + be of type `bytes`. + + Returns: + Iterable[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseResponse]: + Sanitized Model Response Response. + """ + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[ + self._transport.stream_sanitize_model_response + ] + + # Validate the universe domain. + self._validate_universe_domain() + + # Send the request. + response = rpc( + requests, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + def __enter__(self) -> "ModelArmorClient": return self diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/base.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/base.py index cc54cc6d7a84..a6a18d038782 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/base.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/base.py @@ -234,6 +234,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.stream_sanitize_user_prompt: gapic_v1.method.wrap_method( + self.stream_sanitize_user_prompt, + default_timeout=None, + client_info=client_info, + ), + self.stream_sanitize_model_response: gapic_v1.method.wrap_method( + self.stream_sanitize_model_response, + default_timeout=None, + client_info=client_info, + ), self.get_location: gapic_v1.method.wrap_method( self.get_location, default_timeout=None, @@ -342,6 +352,30 @@ def sanitize_model_response( ]: raise NotImplementedError() + @property + def stream_sanitize_user_prompt( + self, + ) -> Callable[ + [service.SanitizeUserPromptRequest], + Union[ + service.SanitizeUserPromptResponse, + Awaitable[service.SanitizeUserPromptResponse], + ], + ]: + raise NotImplementedError() + + @property + def stream_sanitize_model_response( + self, + ) -> Callable[ + [service.SanitizeModelResponseRequest], + Union[ + service.SanitizeModelResponseResponse, + Awaitable[service.SanitizeModelResponseResponse], + ], + ]: + raise NotImplementedError() + @property def get_location( self, diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc.py index 6e233fc46771..c064b6271a80 100644 --- 
a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc.py @@ -565,6 +565,66 @@ def sanitize_model_response( ) return self._stubs["sanitize_model_response"] + @property + def stream_sanitize_user_prompt( + self, + ) -> Callable[ + [service.SanitizeUserPromptRequest], service.SanitizeUserPromptResponse + ]: + r"""Return a callable for the stream sanitize user prompt method over gRPC. + + Streaming version of Sanitize User Prompt. + + Returns: + Callable[[~.SanitizeUserPromptRequest], + ~.SanitizeUserPromptResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_sanitize_user_prompt" not in self._stubs: + self._stubs["stream_sanitize_user_prompt"] = ( + self._logged_channel.stream_stream( + "/google.cloud.modelarmor.v1beta.ModelArmor/StreamSanitizeUserPrompt", + request_serializer=service.SanitizeUserPromptRequest.serialize, + response_deserializer=service.SanitizeUserPromptResponse.deserialize, + ) + ) + return self._stubs["stream_sanitize_user_prompt"] + + @property + def stream_sanitize_model_response( + self, + ) -> Callable[ + [service.SanitizeModelResponseRequest], service.SanitizeModelResponseResponse + ]: + r"""Return a callable for the stream sanitize model response method over gRPC. + + Streaming version of Sanitizes Model Response. + + Returns: + Callable[[~.SanitizeModelResponseRequest], + ~.SanitizeModelResponseResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if "stream_sanitize_model_response" not in self._stubs: + self._stubs["stream_sanitize_model_response"] = ( + self._logged_channel.stream_stream( + "/google.cloud.modelarmor.v1beta.ModelArmor/StreamSanitizeModelResponse", + request_serializer=service.SanitizeModelResponseRequest.serialize, + response_deserializer=service.SanitizeModelResponseResponse.deserialize, + ) + ) + return self._stubs["stream_sanitize_model_response"] + def close(self): self._logged_channel.close() diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc_asyncio.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc_asyncio.py index 458735299e46..72de4b81cf5c 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc_asyncio.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/grpc_asyncio.py @@ -579,6 +579,68 @@ def sanitize_model_response( ) return self._stubs["sanitize_model_response"] + @property + def stream_sanitize_user_prompt( + self, + ) -> Callable[ + [service.SanitizeUserPromptRequest], + Awaitable[service.SanitizeUserPromptResponse], + ]: + r"""Return a callable for the stream sanitize user prompt method over gRPC. + + Streaming version of Sanitize User Prompt. + + Returns: + Callable[[~.SanitizeUserPromptRequest], + Awaitable[~.SanitizeUserPromptResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stream_sanitize_user_prompt" not in self._stubs: + self._stubs["stream_sanitize_user_prompt"] = ( + self._logged_channel.stream_stream( + "/google.cloud.modelarmor.v1beta.ModelArmor/StreamSanitizeUserPrompt", + request_serializer=service.SanitizeUserPromptRequest.serialize, + response_deserializer=service.SanitizeUserPromptResponse.deserialize, + ) + ) + return self._stubs["stream_sanitize_user_prompt"] + + @property + def stream_sanitize_model_response( + self, + ) -> Callable[ + [service.SanitizeModelResponseRequest], + Awaitable[service.SanitizeModelResponseResponse], + ]: + r"""Return a callable for the stream sanitize model response method over gRPC. + + Streaming version of Sanitizes Model Response. + + Returns: + Callable[[~.SanitizeModelResponseRequest], + Awaitable[~.SanitizeModelResponseResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if "stream_sanitize_model_response" not in self._stubs: + self._stubs["stream_sanitize_model_response"] = ( + self._logged_channel.stream_stream( + "/google.cloud.modelarmor.v1beta.ModelArmor/StreamSanitizeModelResponse", + request_serializer=service.SanitizeModelResponseRequest.serialize, + response_deserializer=service.SanitizeModelResponseResponse.deserialize, + ) + ) + return self._stubs["stream_sanitize_model_response"] + def _prep_wrapped_messages(self, client_info): """Precompute the wrapped methods, overriding the base class method to use async wrappers.""" self._wrapped_methods = { @@ -672,6 +734,16 @@ def _prep_wrapped_messages(self, client_info): default_timeout=60.0, client_info=client_info, ), + self.stream_sanitize_user_prompt: self._wrap_method( + self.stream_sanitize_user_prompt, + default_timeout=None, + client_info=client_info, + ), + self.stream_sanitize_model_response: self._wrap_method( + self.stream_sanitize_model_response, + default_timeout=None, + client_info=client_info, + ), self.get_location: self._wrap_method( self.get_location, default_timeout=None, diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest.py index ccbaf1ffe414..98f88506356f 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest.py @@ -1691,6 +1691,43 @@ def __call__( ) return resp + class _StreamSanitizeModelResponse( + _BaseModelArmorRestTransport._BaseStreamSanitizeModelResponse, + ModelArmorRestStub, + ): + def __hash__(self): + return hash("ModelArmorRestTransport.StreamSanitizeModelResponse") + + def __call__( + self, + request: service.SanitizeModelResponseRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: 
Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamSanitizeModelResponse is not available over REST transport" + ) + + class _StreamSanitizeUserPrompt( + _BaseModelArmorRestTransport._BaseStreamSanitizeUserPrompt, ModelArmorRestStub + ): + def __hash__(self): + return hash("ModelArmorRestTransport.StreamSanitizeUserPrompt") + + def __call__( + self, + request: service.SanitizeUserPromptRequest, + *, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Optional[float] = None, + metadata: Sequence[Tuple[str, Union[str, bytes]]] = (), + ) -> rest_streaming.ResponseIterator: + raise NotImplementedError( + "Method StreamSanitizeUserPrompt is not available over REST transport" + ) + class _UpdateFloorSetting( _BaseModelArmorRestTransport._BaseUpdateFloorSetting, ModelArmorRestStub ): @@ -2057,6 +2094,30 @@ def sanitize_user_prompt( # In C++ this would require a dynamic_cast return self._SanitizeUserPrompt(self._session, self._host, self._interceptor) # type: ignore + @property + def stream_sanitize_model_response( + self, + ) -> Callable[ + [service.SanitizeModelResponseRequest], service.SanitizeModelResponseResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. + # In C++ this would require a dynamic_cast + return self._StreamSanitizeModelResponse( + self._session, self._host, self._interceptor + ) # type: ignore + + @property + def stream_sanitize_user_prompt( + self, + ) -> Callable[ + [service.SanitizeUserPromptRequest], service.SanitizeUserPromptResponse + ]: + # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
+ # In C++ this would require a dynamic_cast + return self._StreamSanitizeUserPrompt( + self._session, self._host, self._interceptor + ) # type: ignore + @property def update_floor_setting( self, diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest_base.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest_base.py index 57689b39a61e..3349b0e05d61 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest_base.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/services/model_armor/transports/rest_base.py @@ -458,6 +458,14 @@ def _get_query_params_json(transcoded_request): query_params["$alt"] = "json;enum-encoding=int" return query_params + class _BaseStreamSanitizeModelResponse: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + + class _BaseStreamSanitizeUserPrompt: + def __hash__(self): # pragma: NO COVER + return NotImplementedError("__hash__ must be implemented.") + class _BaseUpdateFloorSetting: def __hash__(self): # pragma: NO COVER return NotImplementedError("__hash__ must be implemented.") diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/__init__.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/__init__.py index 8442cbffbb53..817144cdf16c 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/__init__.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/__init__.py @@ -33,6 +33,7 @@ ListTemplatesResponse, MaliciousUriFilterResult, MaliciousUriFilterSettings, + McpServerFloorSetting, MessageItem, MultiLanguageDetectionMetadata, PiAndJailbreakFilterResult, @@ -54,6 +55,7 @@ SdpFinding, SdpFindingLikelihood, SdpInspectResult, + StreamingMode, Template, UpdateFloorSettingRequest, UpdateTemplateRequest, @@ 
-77,6 +79,7 @@ "ListTemplatesResponse", "MaliciousUriFilterResult", "MaliciousUriFilterSettings", + "McpServerFloorSetting", "MessageItem", "MultiLanguageDetectionMetadata", "PiAndJailbreakFilterResult", @@ -107,4 +110,5 @@ "InvocationResult", "RaiFilterType", "SdpFindingLikelihood", + "StreamingMode", ) diff --git a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/service.py b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/service.py index 7a62d26439c3..8679b3fc0650 100644 --- a/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/service.py +++ b/packages/google-cloud-modelarmor/google/cloud/modelarmor_v1beta/types/service.py @@ -30,8 +30,10 @@ "DetectionConfidenceLevel", "SdpFindingLikelihood", "InvocationResult", + "StreamingMode", "Template", "FloorSetting", + "McpServerFloorSetting", "AiPlatformFloorSetting", "ListTemplatesRequest", "ListTemplatesResponse", @@ -205,6 +207,23 @@ class InvocationResult(proto.Enum): FAILURE = 3 +class StreamingMode(proto.Enum): + r"""Streaming Mode for Sanitize\* API. + + Values: + STREAMING_MODE_UNSPECIFIED (0): + Default value. + STREAMING_MODE_BUFFERED (1): + Buffered Streaming mode. + STREAMING_MODE_REALTIME (2): + Real Time Streaming mode. + """ + + STREAMING_MODE_UNSPECIFIED = 0 + STREAMING_MODE_BUFFERED = 1 + STREAMING_MODE_REALTIME = 2 + + class Template(proto.Message): r"""Message describing Template resource @@ -393,6 +412,10 @@ class FloorSetting(proto.Message): This field is a member of `oneof`_ ``_ai_platform_floor_setting``. floor_setting_metadata (google.cloud.modelarmor_v1beta.types.FloorSetting.FloorSettingMetadata): Optional. Metadata for FloorSetting + google_mcp_server_floor_setting (google.cloud.modelarmor_v1beta.types.McpServerFloorSetting): + Optional. Google MCP Server floor setting. + + This field is a member of `oneof`_ ``_google_mcp_server_floor_setting``. 
""" class IntegratedService(proto.Enum): @@ -403,10 +426,14 @@ class IntegratedService(proto.Enum): Unspecified integrated service. AI_PLATFORM (1): AI Platform. + GOOGLE_MCP_SERVER (2): + Google MCP Server (via Shim Service + Extension) """ INTEGRATED_SERVICE_UNSPECIFIED = 0 AI_PLATFORM = 1 + GOOGLE_MCP_SERVER = 2 class FloorSettingMetadata(proto.Message): r"""message describing FloorSetting Metadata @@ -478,6 +505,67 @@ class MultiLanguageDetection(proto.Message): number=8, message=FloorSettingMetadata, ) + google_mcp_server_floor_setting: "McpServerFloorSetting" = proto.Field( + proto.MESSAGE, + number=9, + optional=True, + message="McpServerFloorSetting", + ) + + +class McpServerFloorSetting(proto.Message): + r"""Message describing MCP Server Floor Setting. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + inspect_only (bool): + Optional. If true, Model Armor filters will + be run in inspect only mode. No action will be + taken on the request. + + This field is a member of `oneof`_ ``enforcement_type``. + inspect_and_block (bool): + Optional. If true, Model Armor filters will + be run in inspect and block mode. Requests that + trip Model Armor filters will be blocked. + + This field is a member of `oneof`_ ``enforcement_type``. + enable_cloud_logging (bool): + Optional. If true, log Model Armor filter + results to Cloud Logging. + apis (MutableSequence[str]): + Optional. List of MCP servers for which the + MCP floor setting is applicable. Examples: + "bigquery.googleapis.com/mcp", + "run.googleapis.com/mcp" Empty list denotes that + the floor setting is applicable to all MCP + servers. 
+ """ + + inspect_only: bool = proto.Field( + proto.BOOL, + number=1, + oneof="enforcement_type", + ) + inspect_and_block: bool = proto.Field( + proto.BOOL, + number=2, + oneof="enforcement_type", + ) + enable_cloud_logging: bool = proto.Field( + proto.BOOL, + number=3, + ) + apis: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) class AiPlatformFloorSetting(proto.Message): @@ -861,7 +949,7 @@ class PiAndJailbreakFilterEnforcement(proto.Enum): ENABLED (1): Enabled DISABLED (2): - Enabled + Disabled """ PI_AND_JAILBREAK_FILTER_ENFORCEMENT_UNSPECIFIED = 0 @@ -1076,6 +1164,8 @@ class SdpAdvancedConfig(proto.Message): class SanitizeUserPromptRequest(proto.Message): r"""Sanitize User Prompt request. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. Represents resource name of @@ -1086,6 +1176,10 @@ class SanitizeUserPromptRequest(proto.Message): multi_language_detection_metadata (google.cloud.modelarmor_v1beta.types.MultiLanguageDetectionMetadata): Optional. Metadata related to Multi Language Detection. + streaming_mode (google.cloud.modelarmor_v1beta.types.StreamingMode): + Optional. Streaming Mode for StreamSanitize\* API. + + This field is a member of `oneof`_ ``_streaming_mode``. """ name: str = proto.Field( @@ -1102,11 +1196,19 @@ class SanitizeUserPromptRequest(proto.Message): number=6, message="MultiLanguageDetectionMetadata", ) + streaming_mode: "StreamingMode" = proto.Field( + proto.ENUM, + number=7, + optional=True, + enum="StreamingMode", + ) class SanitizeModelResponseRequest(proto.Message): r"""Sanitize Model Response request. + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + Attributes: name (str): Required. 
Represents resource name of @@ -1120,6 +1222,10 @@ class SanitizeModelResponseRequest(proto.Message): multi_language_detection_metadata (google.cloud.modelarmor_v1beta.types.MultiLanguageDetectionMetadata): Optional. Metadata related for multi language detection. + streaming_mode (google.cloud.modelarmor_v1beta.types.StreamingMode): + Optional. Streaming Mode for StreamSanitize\* API. + + This field is a member of `oneof`_ ``_streaming_mode``. """ name: str = proto.Field( @@ -1140,6 +1246,12 @@ class SanitizeModelResponseRequest(proto.Message): number=7, message="MultiLanguageDetectionMetadata", ) + streaming_mode: "StreamingMode" = proto.Field( + proto.ENUM, + number=8, + optional=True, + enum="StreamingMode", + ) class SanitizeUserPromptResponse(proto.Message): @@ -1260,11 +1372,20 @@ class MultiLanguageDetectionMetadata(proto.Message): Attributes: source_language (str): - Optional. Optional Source language of the - user prompt. - If multi-language detection is enabled but - language is not set in that case we would - automatically detect the source language. + Optional. Optional Source language of the user prompt. + + If multi-language detection is enabled and this field is not + set, the source language will be automatically detected. + When a source language is provided, Model Armor uses it to + sanitize the input. In that case the system does not perform + auto-detection and relies solely on the specified language. + + This string field accepts a language code from the ISO-639 + standard. For a list of languages supported by Model Armor, + see [Model Armor supported languages] + (https://cloud.google.com/security-command-center/docs/model-armor-overview#languages-supported). + For a comprehensive list of language codes, see + `ISO-639 `__. enable_multi_language_detection (bool): Optional. Enable detection of multi-language prompts and responses. 
@@ -1377,7 +1498,7 @@ class RaiFilterResult(proto.Message): rai_filter_type_results (MutableMapping[str, google.cloud.modelarmor_v1beta.types.RaiFilterResult.RaiFilterTypeResult]): The map of RAI filter results where key is RAI filter type - either of "sexually_explicit", "hate_speech", "harassment", - "dangerous". + "dangerous", "violence", "sexually_suggestive". """ class RaiFilterTypeResult(proto.Message): @@ -1568,6 +1689,9 @@ class ByteDataItem(proto.Message): Required. The type of byte data byte_data (bytes): Required. Bytes Data + file_label (str): + Optional. Label of the file. This is used to + identify the file in the response. """ class ByteItemType(proto.Enum): @@ -1590,6 +1714,8 @@ class ByteItemType(proto.Enum): TXT CSV (7): CSV + ZIP (9): + ZIP """ BYTE_ITEM_TYPE_UNSPECIFIED = 0 @@ -1600,6 +1726,7 @@ class ByteItemType(proto.Enum): POWERPOINT_DOCUMENT = 5 TXT = 6 CSV = 7 + ZIP = 9 byte_data_type: ByteItemType = proto.Field( proto.ENUM, @@ -1610,6 +1737,10 @@ class ByteItemType(proto.Enum): proto.BYTES, number=2, ) + file_label: str = proto.Field( + proto.STRING, + number=3, + ) class SdpDeidentifyResult(proto.Message): @@ -1694,11 +1825,13 @@ class SdpFindingLocation(proto.Message): containing element. Note that when the content is not textual, this references the UTF-8 encoded textual representation of the content. + Note: Omitted if content is an image. codepoint_range (google.cloud.modelarmor_v1beta.types.RangeInfo): Unicode character offsets delimiting the finding. These are relative to the finding's containing element. Provided when the content is text. + Note: Omitted if content is an image. 
""" byte_range: "RangeInfo" = proto.Field( diff --git a/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_async.py b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_async.py new file mode 100644 index 000000000000..ee815a2994c5 --- /dev/null +++ b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamSanitizeModelResponse +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-modelarmor + + +# [START modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import modelarmor_v1beta + + +async def sample_stream_sanitize_model_response(): + # Create a client + client = modelarmor_v1beta.ModelArmorAsyncClient() + + # Initialize request argument(s) + model_response_data = modelarmor_v1beta.DataItem() + model_response_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeModelResponseRequest( + name="name_value", + model_response_data=model_response_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeModelResponseRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. + requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_sanitize_model_response(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_async] diff --git a/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_sync.py b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_sync.py new file mode 100644 index 000000000000..2f3b368a2ee5 --- /dev/null +++ b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamSanitizeModelResponse +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-modelarmor + + +# [START modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import modelarmor_v1beta + + +def sample_stream_sanitize_model_response(): + # Create a client + client = modelarmor_v1beta.ModelArmorClient() + + # Initialize request argument(s) + model_response_data = modelarmor_v1beta.DataItem() + model_response_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeModelResponseRequest( + name="name_value", + model_response_data=model_response_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeModelResponseRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_sanitize_model_response(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_sync] diff --git a/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_async.py b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_async.py new file mode 100644 index 000000000000..fdb950bba49b --- /dev/null +++ b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_async.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamSanitizeUserPrompt +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-modelarmor + + +# [START modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import modelarmor_v1beta + + +async def sample_stream_sanitize_user_prompt(): + # Create a client + client = modelarmor_v1beta.ModelArmorAsyncClient() + + # Initialize request argument(s) + user_prompt_data = modelarmor_v1beta.DataItem() + user_prompt_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeUserPromptRequest( + name="name_value", + user_prompt_data=user_prompt_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeUserPromptRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = await client.stream_sanitize_user_prompt(requests=request_generator()) + + # Handle the response + async for response in stream: + print(response) + + +# [END modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_async] diff --git a/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_sync.py b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_sync.py new file mode 100644 index 000000000000..7f812e8d3b86 --- /dev/null +++ b/packages/google-cloud-modelarmor/samples/generated_samples/modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_sync.py @@ -0,0 +1,68 @@ +# -*- coding: utf-8 -*- +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for StreamSanitizeUserPrompt +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-modelarmor + + +# [START modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud import modelarmor_v1beta + + +def sample_stream_sanitize_user_prompt(): + # Create a client + client = modelarmor_v1beta.ModelArmorClient() + + # Initialize request argument(s) + user_prompt_data = modelarmor_v1beta.DataItem() + user_prompt_data.text = "text_value" + + request = modelarmor_v1beta.SanitizeUserPromptRequest( + name="name_value", + user_prompt_data=user_prompt_data, + ) + + # This method expects an iterator which contains + # 'modelarmor_v1beta.SanitizeUserPromptRequest' objects + # Here we create a generator that yields a single `request` for + # demonstrative purposes. 
+ requests = [request] + + def request_generator(): + for request in requests: + yield request + + # Make the request + stream = client.stream_sanitize_user_prompt(requests=request_generator()) + + # Handle the response + for response in stream: + print(response) + + +# [END modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_sync] diff --git a/packages/google-cloud-modelarmor/samples/generated_samples/snippet_metadata_google.cloud.modelarmor.v1beta.json b/packages/google-cloud-modelarmor/samples/generated_samples/snippet_metadata_google.cloud.modelarmor.v1beta.json index 95afcc22fd54..510586f26564 100644 --- a/packages/google-cloud-modelarmor/samples/generated_samples/snippet_metadata_google.cloud.modelarmor.v1beta.json +++ b/packages/google-cloud-modelarmor/samples/generated_samples/snippet_metadata_google.cloud.modelarmor.v1beta.json @@ -1132,6 +1132,312 @@ ], "title": "modelarmor_v1beta_generated_model_armor_sanitize_user_prompt_sync.py" }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorAsyncClient", + "shortName": "ModelArmorAsyncClient" + }, + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorAsyncClient.stream_sanitize_model_response", + "method": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor.StreamSanitizeModelResponse", + "service": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor", + "shortName": "ModelArmor" + }, + "shortName": "StreamSanitizeModelResponse" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseResponse]", + "shortName": 
"stream_sanitize_model_response" + }, + "description": "Sample for StreamSanitizeModelResponse", + "file": "modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_async", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorClient", + "shortName": "ModelArmorClient" + }, + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorClient.stream_sanitize_model_response", + "method": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor.StreamSanitizeModelResponse", + "service": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor", + "shortName": "ModelArmor" + }, + "shortName": "StreamSanitizeModelResponse" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.modelarmor_v1beta.types.SanitizeModelResponseResponse]", + "shortName": "stream_sanitize_model_response" + }, + "description": "Sample for StreamSanitizeModelResponse", + "file": "modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_sync.py", + 
"language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "modelarmor_v1beta_generated_ModelArmor_StreamSanitizeModelResponse_sync", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "modelarmor_v1beta_generated_model_armor_stream_sanitize_model_response_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorAsyncClient", + "shortName": "ModelArmorAsyncClient" + }, + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorAsyncClient.stream_sanitize_user_prompt", + "method": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor.StreamSanitizeUserPrompt", + "service": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor", + "shortName": "ModelArmor" + }, + "shortName": "StreamSanitizeUserPrompt" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptResponse]", + "shortName": "stream_sanitize_user_prompt" + }, + "description": "Sample for StreamSanitizeUserPrompt", + "file": "modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_async", + "segments": [ + { + "end": 66, + "start": 
27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorClient", + "shortName": "ModelArmorClient" + }, + "fullName": "google.cloud.modelarmor_v1beta.ModelArmorClient.stream_sanitize_user_prompt", + "method": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor.StreamSanitizeUserPrompt", + "service": { + "fullName": "google.cloud.modelarmor.v1beta.ModelArmor", + "shortName": "ModelArmor" + }, + "shortName": "StreamSanitizeUserPrompt" + }, + "parameters": [ + { + "name": "requests", + "type": "Iterator[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptRequest]" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, Union[str, bytes]]]" + } + ], + "resultType": "Iterable[google.cloud.modelarmor_v1beta.types.SanitizeUserPromptResponse]", + "shortName": "stream_sanitize_user_prompt" + }, + "description": "Sample for StreamSanitizeUserPrompt", + "file": "modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "modelarmor_v1beta_generated_ModelArmor_StreamSanitizeUserPrompt_sync", + "segments": [ + { + "end": 66, + "start": 27, + "type": "FULL" + }, + { + "end": 66, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 59, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, 
+ { + "end": 62, + "start": 60, + "type": "REQUEST_EXECUTION" + }, + { + "end": 67, + "start": 63, + "type": "RESPONSE_HANDLING" + } + ], + "title": "modelarmor_v1beta_generated_model_armor_stream_sanitize_user_prompt_sync.py" + }, { "canonical": true, "clientMethod": { diff --git a/packages/google-cloud-modelarmor/tests/unit/gapic/modelarmor_v1beta/test_model_armor.py b/packages/google-cloud-modelarmor/tests/unit/gapic/modelarmor_v1beta/test_model_armor.py index 6a789f86f832..be5c443f7f6a 100644 --- a/packages/google-cloud-modelarmor/tests/unit/gapic/modelarmor_v1beta/test_model_armor.py +++ b/packages/google-cloud-modelarmor/tests/unit/gapic/modelarmor_v1beta/test_model_armor.py @@ -4312,6 +4312,322 @@ async def test_sanitize_model_response_field_headers_async(): ) in kw["metadata"] +@pytest.mark.parametrize( + "request_type", + [ + service.SanitizeUserPromptRequest, + dict, + ], +) +def test_stream_sanitize_user_prompt(request_type, transport: str = "grpc"): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_sanitize_user_prompt), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([service.SanitizeUserPromptResponse()]) + response = client.stream_sanitize_user_prompt(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. 
+ for message in response: + assert isinstance(message, service.SanitizeUserPromptResponse) + + +def test_stream_sanitize_user_prompt_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stream_sanitize_user_prompt + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stream_sanitize_user_prompt + ] = mock_rpc + request = [{}] + client.stream_sanitize_user_prompt(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.stream_sanitize_user_prompt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_sanitize_user_prompt_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelArmorAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.stream_sanitize_user_prompt + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.stream_sanitize_user_prompt + ] = mock_rpc + + request = [{}] + await client.stream_sanitize_user_prompt(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.stream_sanitize_user_prompt(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_sanitize_user_prompt_async( + transport: str = "grpc_asyncio", request_type=service.SanitizeUserPromptRequest +): + client = ModelArmorAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_sanitize_user_prompt), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[service.SanitizeUserPromptResponse()] + ) + response = await client.stream_sanitize_user_prompt(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, service.SanitizeUserPromptResponse) + + +@pytest.mark.asyncio +async def test_stream_sanitize_user_prompt_async_from_dict(): + await test_stream_sanitize_user_prompt_async(request_type=dict) + + +@pytest.mark.parametrize( + "request_type", + [ + service.SanitizeModelResponseRequest, + dict, + ], +) +def test_stream_sanitize_model_response(request_type, transport: str = "grpc"): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_sanitize_model_response), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = iter([service.SanitizeModelResponseResponse()]) + response = client.stream_sanitize_model_response(iter(requests)) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + for message in response: + assert isinstance(message, service.SanitizeModelResponseResponse) + + +def test_stream_sanitize_model_response_use_cached_wrapped_rpc(): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method.wrap_method") as wrapper_fn: + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._transport.stream_sanitize_model_response + in client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.Mock() + mock_rpc.return_value.name = ( + "foo" # operation_request.operation in compute client(s) expect a string. + ) + client._transport._wrapped_methods[ + client._transport.stream_sanitize_model_response + ] = mock_rpc + request = [{}] + client.stream_sanitize_model_response(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert mock_rpc.call_count == 1 + + client.stream_sanitize_model_response(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_sanitize_model_response_async_use_cached_wrapped_rpc( + transport: str = "grpc_asyncio", +): + # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, + # instead of constructing them on each call + with mock.patch("google.api_core.gapic_v1.method_async.wrap_method") as wrapper_fn: + client = ModelArmorAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Should wrap all calls on client creation + assert wrapper_fn.call_count > 0 + wrapper_fn.reset_mock() + + # Ensure method has been cached + assert ( + client._client._transport.stream_sanitize_model_response + in client._client._transport._wrapped_methods + ) + + # Replace cached wrapped function with mock + mock_rpc = mock.AsyncMock() + mock_rpc.return_value = mock.Mock() + client._client._transport._wrapped_methods[ + client._client._transport.stream_sanitize_model_response + ] = mock_rpc + + request = [{}] + await client.stream_sanitize_model_response(request) + + # Establish that the underlying gRPC stub method was called. + assert mock_rpc.call_count == 1 + + await client.stream_sanitize_model_response(request) + + # Establish that a new wrapper was not created for this call + assert wrapper_fn.call_count == 0 + assert mock_rpc.call_count == 2 + + +@pytest.mark.asyncio +async def test_stream_sanitize_model_response_async( + transport: str = "grpc_asyncio", request_type=service.SanitizeModelResponseRequest +): + client = ModelArmorAsyncClient( + credentials=async_anonymous_credentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + requests = [request] + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.stream_sanitize_model_response), "__call__" + ) as call: + # Designate an appropriate return value for the call. + call.return_value = mock.Mock(aio.StreamStreamCall, autospec=True) + call.return_value.read = mock.AsyncMock( + side_effect=[service.SanitizeModelResponseResponse()] + ) + response = await client.stream_sanitize_model_response(iter(requests)) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert next(args[0]) == request + + # Establish that the response is the type that we expect. + message = await response.read() + assert isinstance(message, service.SanitizeModelResponseResponse) + + +@pytest.mark.asyncio +async def test_stream_sanitize_model_response_async_from_dict(): + await test_stream_sanitize_model_response_async(request_type=dict) + + def test_list_templates_rest_use_cached_wrapped_rpc(): # Clients should use _prep_wrapped_messages to create cached wrapped rpcs, # instead of constructing them on each call @@ -5969,6 +6285,56 @@ def test_sanitize_model_response_rest_unset_required_fields(): ) +def test_stream_sanitize_user_prompt_rest_no_http_options(): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = service.SanitizeUserPromptRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.stream_sanitize_user_prompt(requests) + + +def test_stream_sanitize_model_response_rest_no_http_options(): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="rest", + ) + request = service.SanitizeModelResponseRequest() + requests = [request] + with pytest.raises(RuntimeError): + client.stream_sanitize_model_response(requests) + + +def test_stream_sanitize_user_prompt_rest_error(): + 
client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.stream_sanitize_user_prompt({}) + assert ( + "Method StreamSanitizeUserPrompt is not available over REST transport" + in str(not_implemented_error.value) + ) + + +def test_stream_sanitize_model_response_rest_error(): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + # Since a `google.api.http` annotation is required for using a rest transport + # method, this should error. + with pytest.raises(NotImplementedError) as not_implemented_error: + client.stream_sanitize_model_response({}) + assert ( + "Method StreamSanitizeModelResponse is not available over REST transport" + in str(not_implemented_error.value) + ) + + def test_credentials_transport_error(): # It is an error to provide credentials and a transport instance. transport = transports.ModelArmorGrpcTransport( @@ -7574,6 +7940,12 @@ def test_update_floor_setting_rest_call_success(request_type): "floor_setting_metadata": { "multi_language_detection": {"enable_multi_language_detection": True} }, + "google_mcp_server_floor_setting": { + "inspect_only": True, + "inspect_and_block": True, + "enable_cloud_logging": True, + "apis": ["apis_value1", "apis_value2"], + }, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -8002,6 +8374,32 @@ def test_sanitize_model_response_rest_interceptors(null_interceptor): post_with_metadata.assert_called_once() +def test_stream_sanitize_user_prompt_rest_error(): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.stream_sanitize_user_prompt({}) + assert ( + "Method StreamSanitizeUserPrompt is not available over REST transport" + in str(not_implemented_error.value) + ) + + +def test_stream_sanitize_model_response_rest_error(): + client = ModelArmorClient( + credentials=ga_credentials.AnonymousCredentials(), transport="rest" + ) + + with pytest.raises(NotImplementedError) as not_implemented_error: + client.stream_sanitize_model_response({}) + assert ( + "Method StreamSanitizeModelResponse is not available over REST transport" + in str(not_implemented_error.value) + ) + + def test_get_location_rest_bad_request(request_type=locations_pb2.GetLocationRequest): client = ModelArmorClient( credentials=ga_credentials.AnonymousCredentials(), @@ -8361,6 +8759,8 @@ def test_model_armor_base_transport(): "update_floor_setting", "sanitize_user_prompt", "sanitize_model_response", + "stream_sanitize_user_prompt", + "stream_sanitize_model_response", "get_location", "list_locations", ) @@ -8652,6 +9052,12 @@ def test_model_armor_client_transport_session_collision(transport_name): session1 = client1.transport.sanitize_model_response._session session2 = client2.transport.sanitize_model_response._session assert session1 != session2 + session1 = client1.transport.stream_sanitize_user_prompt._session + session2 = client2.transport.stream_sanitize_user_prompt._session + assert session1 != session2 + session1 = client1.transport.stream_sanitize_model_response._session + session2 = client2.transport.stream_sanitize_model_response._session + assert session1 != 
session2 def test_model_armor_grpc_transport_channel(): diff --git a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py index 459f8699750f..46ab6d83879f 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp/__init__.py @@ -65,6 +65,7 @@ LocationMetadata, OsType, QosType, + ScaleType, ServiceLevel, StoragePoolType, UserCommands, @@ -162,6 +163,7 @@ GetVolumeRequest, HourlySchedule, HybridReplicationParameters, + LargeCapacityConfig, ListVolumesRequest, ListVolumesResponse, MonthlySchedule, @@ -222,6 +224,7 @@ "HybridReplicationSchedule", "OsType", "QosType", + "ScaleType", "ServiceLevel", "StoragePoolType", "CreateHostGroupRequest", @@ -301,6 +304,7 @@ "GetVolumeRequest", "HourlySchedule", "HybridReplicationParameters", + "LargeCapacityConfig", "ListVolumesRequest", "ListVolumesResponse", "MonthlySchedule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py index 7c74a1f75b86..808b2b41df35 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/__init__.py @@ -75,6 +75,7 @@ LocationMetadata, OsType, QosType, + ScaleType, ServiceLevel, StoragePoolType, UserCommands, @@ -172,6 +173,7 @@ GetVolumeRequest, HourlySchedule, HybridReplicationParameters, + LargeCapacityConfig, ListVolumesRequest, ListVolumesResponse, MonthlySchedule, @@ -354,6 +356,7 @@ def _get_version(dependency_name): "HybridReplicationParameters", "HybridReplicationSchedule", "KmsConfig", + "LargeCapacityConfig", "ListActiveDirectoriesRequest", "ListActiveDirectoriesResponse", "ListBackupPoliciesRequest", @@ -395,6 +398,7 @@ def _get_version(dependency_name): "ReverseReplicationDirectionRequest", "RevertVolumeRequest", "SMBSettings", + "ScaleType", "SecurityStyle", "ServiceLevel", 
"SimpleExportPolicyRule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py index 12463aa5ac52..e79491627c34 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/__init__.py @@ -60,6 +60,7 @@ LocationMetadata, OsType, QosType, + ScaleType, ServiceLevel, StoragePoolType, UserCommands, @@ -157,6 +158,7 @@ GetVolumeRequest, HourlySchedule, HybridReplicationParameters, + LargeCapacityConfig, ListVolumesRequest, ListVolumesResponse, MonthlySchedule, @@ -215,6 +217,7 @@ "HybridReplicationSchedule", "OsType", "QosType", + "ScaleType", "ServiceLevel", "StoragePoolType", "CreateHostGroupRequest", @@ -294,6 +297,7 @@ "GetVolumeRequest", "HourlySchedule", "HybridReplicationParameters", + "LargeCapacityConfig", "ListVolumesRequest", "ListVolumesResponse", "MonthlySchedule", diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py index e3fed6d7e48d..f3e79945ea18 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/common.py @@ -27,6 +27,7 @@ "EncryptionType", "DirectoryServiceType", "StoragePoolType", + "ScaleType", "HybridReplicationSchedule", "QosType", "OsType", @@ -127,6 +128,27 @@ class StoragePoolType(proto.Enum): UNIFIED = 2 +class ScaleType(proto.Enum): + r"""Defines the scale-type of a UNIFIED Storage Pool. + + Values: + SCALE_TYPE_UNSPECIFIED (0): + Unspecified scale type. + SCALE_TYPE_DEFAULT (1): + Represents standard capacity and performance + scale-type. Suitable for general purpose + workloads. + SCALE_TYPE_SCALEOUT (2): + Represents higher capacity and performance + scale-type. Suitable for more demanding + workloads. 
+ """ + + SCALE_TYPE_UNSPECIFIED = 0 + SCALE_TYPE_DEFAULT = 1 + SCALE_TYPE_SCALEOUT = 2 + + class HybridReplicationSchedule(proto.Enum): r"""Schedule for Hybrid Replication. New enum values may be added in future to support different diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py index 5ee5df497b78..d59c2a9263da 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/storage_pool.py @@ -361,6 +361,9 @@ class StoragePool(proto.Message): specified during creation, it defaults to ``DEFAULT``. This field is a member of `oneof`_ ``_mode``. + scale_type (google.cloud.netapp_v1.types.ScaleType): + Optional. The scale type of the storage pool. Defaults to + ``SCALE_TYPE_DEFAULT`` if not specified. """ class State(proto.Enum): @@ -538,6 +541,11 @@ class State(proto.Enum): optional=True, enum="Mode", ) + scale_type: common.ScaleType = proto.Field( + proto.ENUM, + number=38, + enum=common.ScaleType, + ) class ValidateDirectoryServiceRequest(proto.Message): diff --git a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py index 71966c3fa2d8..3b4d6dd3085b 100644 --- a/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py +++ b/packages/google-cloud-netapp/google/cloud/netapp_v1/types/volume.py @@ -39,6 +39,7 @@ "DeleteVolumeRequest", "RevertVolumeRequest", "Volume", + "LargeCapacityConfig", "ExportPolicy", "SimpleExportPolicyRule", "SnapshotPolicy", @@ -464,8 +465,11 @@ class Volume(proto.Message): Optional. List of actions that are restricted on this volume. large_capacity (bool): - Optional. Flag indicating if the volume will - be a large capacity volume or a regular volume. + Optional. Flag indicating if the volume will be a large + capacity volume or a regular volume. 
This field is used for + legacy FILE pools. For Unified pools, use the + ``large_capacity_config`` field instead. This field and + ``large_capacity_config`` are mutually exclusive. multiple_endpoints (bool): Optional. Flag indicating if the volume will have an IP address per node for volumes supporting multiple IP @@ -499,6 +503,12 @@ class Volume(proto.Message): Optional. Block devices for the volume. Currently, only one block device is permitted per Volume. + large_capacity_config (google.cloud.netapp_v1.types.LargeCapacityConfig): + Optional. Large capacity config for the volume. Enables and + configures large capacity for volumes in Unified pools with + File protocols. Not applicable for Block protocols in + Unified pools. This field and the legacy ``large_capacity`` + boolean field are mutually exclusive. clone_details (google.cloud.netapp_v1.types.Volume.CloneDetails): Output only. If this volume is a clone, this field contains details about the clone. @@ -767,6 +777,11 @@ class CloneDetails(proto.Message): number=45, message="BlockDevice", ) + large_capacity_config: "LargeCapacityConfig" = proto.Field( + proto.MESSAGE, + number=46, + message="LargeCapacityConfig", + ) clone_details: CloneDetails = proto.Field( proto.MESSAGE, number=47, @@ -774,6 +789,25 @@ class CloneDetails(proto.Message): ) +class LargeCapacityConfig(proto.Message): + r"""Configuration for a Large Capacity Volume. A Large Capacity + Volume supports sizes ranging from 4.8 TiB to 20 PiB, it is + composed of multiple internal constituents, and must be created + in a large capacity pool. + + Attributes: + constituent_count (int): + Optional. The number of internal constituents + (e.g., FlexVols) for this large volume. The + minimum number of constituents is 2. + """ + + constituent_count: int = proto.Field( + proto.INT32, + number=1, + ) + + class ExportPolicy(proto.Message): r"""Defines the export policy for the volume. 
diff --git a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py index 3bb31971e4c7..9d163ef5f56c 100644 --- a/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py +++ b/packages/google-cloud-netapp/tests/unit/gapic/netapp_v1/test_net_app.py @@ -2251,6 +2251,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): hot_tier_size_used_gib=2329, type_=common.StoragePoolType.FILE, mode=storage_pool.Mode.DEFAULT, + scale_type=common.ScaleType.SCALE_TYPE_DEFAULT, ) response = client.get_storage_pool(request) @@ -2293,6 +2294,7 @@ def test_get_storage_pool(request_type, transport: str = "grpc"): assert response.hot_tier_size_used_gib == 2329 assert response.type_ == common.StoragePoolType.FILE assert response.mode == storage_pool.Mode.DEFAULT + assert response.scale_type == common.ScaleType.SCALE_TYPE_DEFAULT def test_get_storage_pool_non_empty_request_with_auto_populated_field(): @@ -2451,6 +2453,7 @@ async def test_get_storage_pool_async( hot_tier_size_used_gib=2329, type_=common.StoragePoolType.FILE, mode=storage_pool.Mode.DEFAULT, + scale_type=common.ScaleType.SCALE_TYPE_DEFAULT, ) ) response = await client.get_storage_pool(request) @@ -2494,6 +2497,7 @@ async def test_get_storage_pool_async( assert response.hot_tier_size_used_gib == 2329 assert response.type_ == common.StoragePoolType.FILE assert response.mode == storage_pool.Mode.DEFAULT + assert response.scale_type == common.ScaleType.SCALE_TYPE_DEFAULT @pytest.mark.asyncio @@ -42080,6 +42084,7 @@ async def test_get_storage_pool_empty_call_grpc_asyncio(): hot_tier_size_used_gib=2329, type_=common.StoragePoolType.FILE, mode=storage_pool.Mode.DEFAULT, + scale_type=common.ScaleType.SCALE_TYPE_DEFAULT, ) ) await client.get_storage_pool(request=None) @@ -44253,6 +44258,7 @@ def test_create_storage_pool_rest_call_success(request_type): "hot_tier_size_used_gib": 2329, "type_": 1, "mode": 1, + 
"scale_type": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. # Delete any fields which are not present in the current runtime dependency @@ -44481,6 +44487,7 @@ def test_get_storage_pool_rest_call_success(request_type): hot_tier_size_used_gib=2329, type_=common.StoragePoolType.FILE, mode=storage_pool.Mode.DEFAULT, + scale_type=common.ScaleType.SCALE_TYPE_DEFAULT, ) # Wrap the value into a proper Response obj @@ -44528,6 +44535,7 @@ def test_get_storage_pool_rest_call_success(request_type): assert response.hot_tier_size_used_gib == 2329 assert response.type_ == common.StoragePoolType.FILE assert response.mode == storage_pool.Mode.DEFAULT + assert response.scale_type == common.ScaleType.SCALE_TYPE_DEFAULT @pytest.mark.parametrize("null_interceptor", [True, False]) @@ -44674,6 +44682,7 @@ def test_update_storage_pool_rest_call_success(request_type): "hot_tier_size_used_gib": 2329, "type_": 1, "mode": 1, + "scale_type": 1, } # The version of a generated dependency at test runtime may differ from the version used during generation. 
# Delete any fields which are not present in the current runtime dependency @@ -45692,6 +45701,7 @@ def test_create_volume_rest_call_success(request_type): "os_type": 1, } ], + "large_capacity_config": {"constituent_count": 1864}, "clone_details": { "source_snapshot": "source_snapshot_value", "source_volume": "source_volume_value", @@ -46040,6 +46050,7 @@ def test_update_volume_rest_call_success(request_type): "os_type": 1, } ], + "large_capacity_config": {"constituent_count": 1864}, "clone_details": { "source_snapshot": "source_snapshot_value", "source_volume": "source_volume_value", diff --git a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py index 430f34f8a4f4..fa94f84c929e 100644 --- a/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py +++ b/packages/google-cloud-network-management/google/cloud/network_management_v1/types/trace.py @@ -1167,7 +1167,9 @@ class RouteInfo(proto.Message): Region of the route. DYNAMIC, PEERING_DYNAMIC, POLICY_BASED and ADVERTISED routes only. If set for POLICY_BASED route, this is a region of VLAN attachments for Cloud Interconnect - the route applies to. + the route applies to. If set to "all" for POLICY_BASED + route, the route applies to VLAN attachments of Cloud + Interconnect in all regions. dest_ip_range (str): Destination IP range of the route. 
next_hop (str): diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py index 287199eeee76..b2084af639ec 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/base.py @@ -284,9 +284,9 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ResourceExhausted, core_exceptions.ServiceUnavailable, ), - deadline=900.0, + deadline=1800.0, ), - default_timeout=900.0, + default_timeout=1800.0, client_info=client_info, ), self.modify_push_config: gapic_v1.method.wrap_method( diff --git a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py index 608398e2105d..9c2b70010c3a 100644 --- a/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py +++ b/packages/google-cloud-pubsub/google/pubsub_v1/services/subscriber/transports/grpc_asyncio.py @@ -992,9 +992,9 @@ def _prep_wrapped_messages(self, client_info): core_exceptions.ResourceExhausted, core_exceptions.ServiceUnavailable, ), - deadline=900.0, + deadline=1800.0, ), - default_timeout=900.0, + default_timeout=1800.0, client_info=client_info, ), self.modify_push_config: self._wrap_method( diff --git a/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/deployments_resources.py b/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/deployments_resources.py index 5960409b517e..b4c87090ae9d 100644 --- a/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/deployments_resources.py +++ 
b/packages/google-cloud-saasplatform-saasservicemgmt/google/cloud/saasplatform_saasservicemgmt_v1beta1/types/deployments_resources.py @@ -179,15 +179,16 @@ class Tenant(proto.Message): representing. The relationship with a consumer resource can be - used by SaaS Runtime for retrieving + used by App Lifecycle Manager for retrieving consumer-defined settings and policies such as maintenance policies (using Unified Maintenance Policy API). saas (str): Required. Immutable. A reference to the Saas that defines the product (managed service) that - the producer wants to manage with SaaS Runtime. - Part of the SaaS Runtime common data model. + the producer wants to manage with App Lifecycle + Manager. Part of the App Lifecycle Manager + common data model. labels (MutableMapping[str, str]): Optional. The labels on the resource, which can be used for categorization. similar to @@ -307,9 +308,9 @@ class UnitKind(proto.Message): saas (str): Required. Immutable. A reference to the Saas that defines the product (managed service) that - the producer wants to manage with SaaS Runtime. - Part of the SaaS Runtime common data model. - Immutable once set. + the producer wants to manage with App Lifecycle + Manager. Part of the App Lifecycle Manager + common data model. Immutable once set. labels (MutableMapping[str, str]): Optional. The labels on the resource, which can be used for categorization. similar to @@ -1298,8 +1299,8 @@ class ToMapping(proto.Message): Required. Name of the inputVariable on the dependency ignore_for_lookup (bool): - Optional. Tells SaaS Runtime if this mapping - should be used during lookup or not + Optional. 
Tells App Lifecycle Manager if this + mapping should be used during lookup or not """ dependency: str = proto.Field( diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/__init__.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/__init__.py index 84b0b20b3292..38d71201a540 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/__init__.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories/__init__.py @@ -31,6 +31,7 @@ RegionalInventoryServiceClient, ) from google.shopping.merchant_inventories_v1.types.inventories_common import ( + InventoryLoyaltyProgram, LocalInventoryAttributes, RegionalInventoryAttributes, ) @@ -54,6 +55,7 @@ "LocalInventoryServiceAsyncClient", "RegionalInventoryServiceClient", "RegionalInventoryServiceAsyncClient", + "InventoryLoyaltyProgram", "LocalInventoryAttributes", "RegionalInventoryAttributes", "DeleteLocalInventoryRequest", diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/__init__.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/__init__.py index b33aadb2f07c..23b66e27d971 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/__init__.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/__init__.py @@ -38,6 +38,7 @@ RegionalInventoryServiceClient, ) from .types.inventories_common import ( + InventoryLoyaltyProgram, LocalInventoryAttributes, RegionalInventoryAttributes, ) @@ -157,6 +158,7 @@ def _get_version(dependency_name): "DeleteRegionalInventoryRequest", "InsertLocalInventoryRequest", "InsertRegionalInventoryRequest", + "InventoryLoyaltyProgram", "ListLocalInventoriesRequest", "ListLocalInventoriesResponse", "ListRegionalInventoriesRequest", diff --git 
a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/async_client.py index c2b3d3839ca0..4f849c107ebf 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/async_client.py @@ -364,6 +364,48 @@ async def sample_list_local_inventories(): local inventories for. Format: ``accounts/{account}/products/{product}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123``, the ``{product}`` segment must + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. 
This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -595,6 +637,49 @@ async def sample_delete_local_inventory(): product to delete. Format: ``accounts/{account}/products/{product}/localInventories/{store_code}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123`` for ``store_code`` "store123", + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for + the local inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/localInventories/store123``. + + 2. 
**Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/client.py index e6f80ce40c99..7d533c758b3b 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/local_inventory_service/client.py @@ -793,6 +793,48 @@ def sample_list_local_inventories(): local inventories for. Format: ``accounts/{account}/products/{product}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. 
**Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123``, the ``{product}`` segment must + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1019,6 +1061,49 @@ def sample_delete_local_inventory(): product to delete. Format: ``accounts/{account}/products/{product}/localInventories/{store_code}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. 
**Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123`` for ``store_code`` "store123", + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for + the local inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/localInventories/store123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/async_client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/async_client.py index dfe7441db686..743c9f79b74f 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/async_client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/async_client.py @@ -373,6 +373,48 @@ async def sample_list_regional_inventories(): ``RegionalInventory`` resources for. Format: ``accounts/{account}/products/{product}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 + Section 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123``, the ``{product}`` segment must + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. 
**Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -607,6 +649,49 @@ async def sample_delete_regional_inventory(): to delete. Format: ``accounts/{account}/products/{product}/regionalInventories/{region}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 + Section 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123`` for ``region`` "region123", the + ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. 
The full resource name for + the regional inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/regionalInventories/region123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/client.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/client.py index 1ea5474727d2..3148c2b80410 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/client.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/services/regional_inventory_service/client.py @@ -800,6 +800,48 @@ def sample_list_regional_inventories(): ``RegionalInventory`` resources for. Format: ``accounts/{account}/products/{product}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. 
Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 + Section 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123``, the ``{product}`` segment must + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. @@ -1033,6 +1075,49 @@ def sample_delete_regional_inventory(): to delete. Format: ``accounts/{account}/products/{product}/regionalInventories/{region}`` + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a + merchant account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the + structure is: + ``local~content_language~feed_label~offer_id``. 
Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 + Section 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. + This encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters + such as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID + ``en~US~sku/123`` for ``region`` "region123", the + ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for + the regional inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/regionalInventories/region123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format + is suitable only when ``content_language``, + ``feed_label``, and ``offer_id`` do not contain + URL-problematic characters like ``/``, ``%``, or + ``~``. + + We recommend using the **Encoded Format** for all + product IDs to ensure correct parsing, especially those + containing special characters. The presence of tilde + (``~``) characters in the ``{product}`` segment is used + to differentiate between the two formats. + This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. 
diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/__init__.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/__init__.py index 1ae11cc81369..56cb5a535f39 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/__init__.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/__init__.py @@ -14,6 +14,7 @@ # limitations under the License. # from .inventories_common import ( + InventoryLoyaltyProgram, LocalInventoryAttributes, RegionalInventoryAttributes, ) @@ -33,6 +34,7 @@ ) __all__ = ( + "InventoryLoyaltyProgram", "LocalInventoryAttributes", "RegionalInventoryAttributes", "DeleteLocalInventoryRequest", diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/inventories_common.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/inventories_common.py index a75cbeea2caa..fc38bb6833a6 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/inventories_common.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/inventories_common.py @@ -25,6 +25,7 @@ package="google.shopping.merchant.inventories.v1", manifest={ "LocalInventoryAttributes", + "InventoryLoyaltyProgram", "RegionalInventoryAttributes", }, ) @@ -76,6 +77,25 @@ class LocalInventoryAttributes(proto.Message): store. Maximum length is 20 bytes. This field is a member of `oneof`_ ``_instore_product_location``. + loyalty_programs (MutableSequence[google.shopping.merchant_inventories_v1.types.InventoryLoyaltyProgram]): + Optional. An optional list of loyalty programs containing + applicable loyalty member prices for this product at this + store. + + This field is used to show store-specific member prices on + Local Inventory Ads (LIA). 
+ + To use this, the loyalty program must be configured in + Google Merchant Center. The benefits provided must match the + merchant's website and be clear to members. This is only + applicable for merchants in supported countries. + + See `Loyalty + program `__ + for details on supported countries and loyalty program + configuration. For local inventory specific details, see the + `Local inventory data + specification `__. """ class Availability(proto.Enum): @@ -214,6 +234,110 @@ class PickupSla(proto.Enum): number=8, optional=True, ) + loyalty_programs: MutableSequence["InventoryLoyaltyProgram"] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="InventoryLoyaltyProgram", + ) + + +class InventoryLoyaltyProgram(proto.Message): + r"""A message that represents loyalty program. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + program_label (str): + The label of the loyalty program. This is an + internal label that uniquely identifies the + relationship between a business entity and a + loyalty program entity. The label must be + provided if there are multiple loyalty programs + available for the merchant, so that the system + can associate the assets below (for example, + price and points) with the correct business. The + corresponding program must be linked to the + Merchant Center account. + + This field is a member of `oneof`_ ``_program_label``. + tier_label (str): + The label of the tier within the loyalty + program. Must match one of the labels within the + program. + + This field is a member of `oneof`_ ``_tier_label``. + price (google.shopping.type.types.Price): + The price for members of the given tier, that + is, the instant discount price. Must be smaller + or equal to the regular price. + + This field is a member of `oneof`_ ``_price``. + cashback_for_future_use (google.shopping.type.types.Price): + The cashback that can be used for future + purchases. 
+ + This field is a member of `oneof`_ ``_cashback_for_future_use``. + loyalty_points (int): + The amount of loyalty points earned on a + purchase. + + This field is a member of `oneof`_ ``_loyalty_points``. + member_price_effective_interval (google.type.interval_pb2.Interval): + A date range during which the item is + eligible for member price. If not specified, the + member price is always applicable. The date + range is represented by a pair of ISO 8601 dates + separated by a space, comma, or slash. + + This field is a member of `oneof`_ ``_member_price_effective_interval``. + shipping_label (str): + The label of the shipping benefit. If the + field has value, this offer has loyalty shipping + benefit. If the field value isn't provided, the + item is not eligible for loyalty shipping for + the given loyalty tier. + + This field is a member of `oneof`_ ``_shipping_label``. + """ + + program_label: str = proto.Field( + proto.STRING, + number=1, + optional=True, + ) + tier_label: str = proto.Field( + proto.STRING, + number=2, + optional=True, + ) + price: types.Price = proto.Field( + proto.MESSAGE, + number=3, + optional=True, + message=types.Price, + ) + cashback_for_future_use: types.Price = proto.Field( + proto.MESSAGE, + number=4, + optional=True, + message=types.Price, + ) + loyalty_points: int = proto.Field( + proto.INT64, + number=5, + optional=True, + ) + member_price_effective_interval: interval_pb2.Interval = proto.Field( + proto.MESSAGE, + number=6, + optional=True, + message=interval_pb2.Interval, + ) + shipping_label: str = proto.Field( + proto.STRING, + number=7, + optional=True, + ) class RegionalInventoryAttributes(proto.Message): @@ -239,6 +363,29 @@ class RegionalInventoryAttributes(proto.Message): of the product in this region. This field is a member of `oneof`_ ``_availability``. + loyalty_programs (MutableSequence[google.shopping.merchant_inventories_v1.types.InventoryLoyaltyProgram]): + Optional. 
An optional list of loyalty programs containing + applicable loyalty member prices for this product in this + region. + + This field is used to show region-specific member prices on + Product Listing Ads (PLA). + + To use this, the loyalty program must be configured in + Google Merchant Center, and the merchant must be using the + Regional Availability and Pricing (RAAP) feature. The + benefits provided must match the merchant's website and be + clear to members. This is only applicable for merchants in + supported countries. + + See `Loyalty + program `__ + for details on supported countries and loyalty program + configuration. Also see `Regional availability and + pricing `__ + and `How to set up regional member + pricing `__ + for more information. """ class Availability(proto.Enum): @@ -280,6 +427,11 @@ class Availability(proto.Enum): optional=True, enum=Availability, ) + loyalty_programs: MutableSequence["InventoryLoyaltyProgram"] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="InventoryLoyaltyProgram", + ) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/localinventory.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/localinventory.py index 78fc2e8c8ebf..b198087d738d 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/localinventory.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/localinventory.py @@ -46,6 +46,60 @@ class LocalInventory(proto.Message): Output only. The name of the ``LocalInventory`` resource. Format: ``accounts/{account}/products/{product}/localInventories/{store_code}`` + + The ``{product}`` segment is a unique identifier for the + product. 
This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section 5). + The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123`` + for ``store_code`` "store123", the ``{product}`` + segment must be the base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource name + for the local inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/localInventories/store123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. + base64_encoded_name (str): + Output only. The unpadded base64url encoded name of the + ``LocalInventory`` resource. 
Format: + ``accounts/{account}/products/{product}/localInventories/{store_code}`` + where the ``{product}`` segment is the unpadded base64url + encoded value of the identifier of the form + ``content_language~feed_label~offer_id``. Example: + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/localInventories/store123`` + for the decoded product ID ``en~US~sku/123`` and + ``store_code`` "store123". Can be used directly as input to + the API methods that require the local product identifier + within the local inventory name to be encoded if it contains + special characters, for example + ```GetLocalInventory`` `__. account (int): Output only. The account that owns the product. This field will be ignored if set by @@ -65,6 +119,10 @@ class LocalInventory(proto.Message): proto.STRING, number=1, ) + base64_encoded_name: str = proto.Field( + proto.STRING, + number=15, + ) account: int = proto.Field( proto.INT64, number=2, @@ -90,6 +148,46 @@ class ListLocalInventoriesRequest(proto.Message): Required. The ``name`` of the parent product to list local inventories for. Format: ``accounts/{account}/products/{product}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section 5). + The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. 
+ + - Example: To represent the product ID ``en~US~sku/123``, + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the + product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. page_size (int): The maximum number of ``LocalInventory`` resources for the given product to return. The service returns fewer than this @@ -160,6 +258,46 @@ class InsertLocalInventoryRequest(proto.Message): Required. The account and product where this inventory will be inserted. Format: ``accounts/{account}/products/{product}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section 5). + The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. 
This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123``, + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the + product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. local_inventory (google.shopping.merchant_inventories_v1.types.LocalInventory): Required. Local inventory information of the product. If the product already has a ``LocalInventory`` resource for the @@ -186,6 +324,46 @@ class DeleteLocalInventoryRequest(proto.Message): Required. The name of the local inventory for the given product to delete. Format: ``accounts/{account}/products/{product}/localInventories/{store_code}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. 
**Encoded Format**: The ``{product}`` segment is an + unpadded base64url encoded string (RFC 4648 Section 5). + The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123`` + for ``store_code`` "store123", the ``{product}`` + segment must be the unpadded base64url encoding of this + string, which is ``ZW5-VVN-c2t1LzEyMw``. The full + resource name for the local inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/localInventories/store123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. """ name: str = proto.Field( diff --git a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/regionalinventory.py b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/regionalinventory.py index ca1738897e4b..e79b9741fd22 100644 --- a/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/regionalinventory.py +++ b/packages/google-shopping-merchant-inventories/google/shopping/merchant_inventories_v1/types/regionalinventory.py @@ -46,7 +46,61 @@ class RegionalInventory(proto.Message): name (str): Output only. The name of the ``RegionalInventory`` resource. 
Format: - ``{regional_inventory.name=accounts/{account}/products/{product}/regionalInventories/{region}`` + ``accounts/{account}/products/{product}/regionalInventories/{region}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123`` + for ``region`` "region123", the ``{product}`` segment + must be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource name + for the regional inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/regionalInventories/region123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. + base64_encoded_name (str): + Output only. 
The unpadded base64url encoded name of the + ``RegionalInventory`` resource. Format: + ``accounts/{account}/products/{product}/regionalInventories/{region}`` + where the ``{product}`` segment is the unpadded base64url + encoded value of the identifier of the form + ``content_language~feed_label~offer_id``. Example: + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/regionalInventories/region123`` + for the decoded product ID ``en~US~sku/123`` and ``region`` + "region123". Can be used directly as input to the API + methods that require the product identifier within the + regional inventory name to be encoded if it contains special + characters, for example + ```GetRegionalInventory`` `__. account (int): Output only. The account that owns the product. This field will be ignored if set by @@ -66,6 +120,10 @@ class RegionalInventory(proto.Message): proto.STRING, number=1, ) + base64_encoded_name: str = proto.Field( + proto.STRING, + number=10, + ) account: int = proto.Field( proto.INT64, number=2, @@ -91,6 +149,46 @@ class ListRegionalInventoriesRequest(proto.Message): Required. The ``name`` of the parent product to list ``RegionalInventory`` resources for. Format: ``accounts/{account}/products/{product}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. 
This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123``, + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the + product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. page_size (int): The maximum number of ``RegionalInventory`` resources for the given product to return. The service returns fewer than @@ -161,6 +259,46 @@ class InsertRegionalInventoryRequest(proto.Message): Required. The account and product where this inventory will be inserted. Format: ``accounts/{account}/products/{product}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. **Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 Section + 5). 
The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123``, + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the + product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. regional_inventory (google.shopping.merchant_inventories_v1.types.RegionalInventory): Required. Regional inventory information to add to the product. If the product already has a ``RegionalInventory`` @@ -187,6 +325,46 @@ class DeleteRegionalInventoryRequest(proto.Message): Required. The name of the ``RegionalInventory`` resource to delete. Format: ``accounts/{account}/products/{product}/regionalInventories/{region}`` + + The ``{product}`` segment is a unique identifier for the + product. This identifier must be unique within a merchant + account and generally follows the structure: + ``content_language~feed_label~offer_id``. Example: + ``en~US~sku123`` For legacy local products, the structure + is: ``local~content_language~feed_label~offer_id``. Example: + ``local~en~US~sku123`` + + The format of the ``{product}`` segment in the URL is + automatically detected by the server, supporting two + options: + + 1. 
**Encoded Format**: The ``{product}`` segment is an + **unpadded base64url** encoded string (RFC 4648 Section + 5). The decoded string must result in the + ``content_language~feed_label~offer_id`` structure. This + encoding MUST be used if any part of the product + identifier (like ``offer_id``) contains characters such + as ``/``, ``%``, or ``~``. + + - Example: To represent the product ID ``en~US~sku/123`` + for ``region`` "region123", the ``{product}`` segment + must be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource name + for the regional inventory would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw/regionalInventories/region123``. + + 2. **Plain Format**: The ``{product}`` segment is the + tilde-separated string + ``content_language~feed_label~offer_id``. This format is + suitable only when ``content_language``, ``feed_label``, + and ``offer_id`` do not contain URL-problematic + characters like ``/``, ``%``, or ``~``. + + We recommend using the **Encoded Format** for all product + IDs to ensure correct parsing, especially those containing + special characters. The presence of tilde (``~``) characters + in the ``{product}`` segment is used to differentiate + between the two formats. """ name: str = proto.Field( diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_local_inventory_service.py b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_local_inventory_service.py index 4b648a72734c..d5a6dfcdff37 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_local_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_local_inventory_service.py @@ -1952,6 +1952,7 @@ def test_insert_local_inventory(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. 
call.return_value = localinventory.LocalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, store_code="store_code_value", ) @@ -1966,6 +1967,7 @@ def test_insert_local_inventory(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, localinventory.LocalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.store_code == "store_code_value" @@ -2104,6 +2106,7 @@ async def test_insert_local_inventory_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( localinventory.LocalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, store_code="store_code_value", ) @@ -2119,6 +2122,7 @@ async def test_insert_local_inventory_async( # Establish that the response is the type that we expect. assert isinstance(response, localinventory.LocalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.store_code == "store_code_value" @@ -3334,6 +3338,7 @@ async def test_insert_local_inventory_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( localinventory.LocalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, store_code="store_code_value", ) @@ -3560,6 +3565,7 @@ def test_insert_local_inventory_rest_call_success(request_type): request_init = {"parent": "accounts/sample1/products/sample2"} request_init["local_inventory"] = { "name": "name_value", + "base64_encoded_name": "base64_encoded_name_value", "account": 749, "store_code": "store_code_value", "local_inventory_attributes": { @@ -3574,6 +3580,17 @@ def test_insert_local_inventory_rest_call_success(request_type): "pickup_method": 1, "pickup_sla": 1, "instore_product_location": 
"instore_product_location_value", + "loyalty_programs": [ + { + "program_label": "program_label_value", + "tier_label": "tier_label_value", + "price": {}, + "cashback_for_future_use": {}, + "loyalty_points": 1546, + "member_price_effective_interval": {}, + "shipping_label": "shipping_label_value", + } + ], }, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -3652,6 +3669,7 @@ def get_message_fields(field): # Designate an appropriate value for the returned response. return_value = localinventory.LocalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, store_code="store_code_value", ) @@ -3671,6 +3689,7 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, localinventory.LocalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.store_code == "store_code_value" diff --git a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_regional_inventory_service.py b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_regional_inventory_service.py index e3bfdc69b556..3ea0532d9070 100644 --- a/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_regional_inventory_service.py +++ b/packages/google-shopping-merchant-inventories/tests/unit/gapic/merchant_inventories_v1/test_regional_inventory_service.py @@ -1966,6 +1966,7 @@ def test_insert_regional_inventory(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. 
call.return_value = regionalinventory.RegionalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, region="region_value", ) @@ -1980,6 +1981,7 @@ def test_insert_regional_inventory(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, regionalinventory.RegionalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.region == "region_value" @@ -2118,6 +2120,7 @@ async def test_insert_regional_inventory_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( regionalinventory.RegionalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, region="region_value", ) @@ -2133,6 +2136,7 @@ async def test_insert_regional_inventory_async( # Establish that the response is the type that we expect. assert isinstance(response, regionalinventory.RegionalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.region == "region_value" @@ -3353,6 +3357,7 @@ async def test_insert_regional_inventory_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( regionalinventory.RegionalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, region="region_value", ) @@ -3581,6 +3586,7 @@ def test_insert_regional_inventory_rest_call_success(request_type): request_init = {"parent": "accounts/sample1/products/sample2"} request_init["regional_inventory"] = { "name": "name_value", + "base64_encoded_name": "base64_encoded_name_value", "account": 749, "region": "region_value", "regional_inventory_attributes": { @@ -3591,6 +3597,17 @@ def test_insert_regional_inventory_rest_call_success(request_type): "end_time": {}, }, "availability": 1, + "loyalty_programs": [ 
+ { + "program_label": "program_label_value", + "tier_label": "tier_label_value", + "price": {}, + "cashback_for_future_use": {}, + "loyalty_points": 1546, + "member_price_effective_interval": {}, + "shipping_label": "shipping_label_value", + } + ], }, } # The version of a generated dependency at test runtime may differ from the version used during generation. @@ -3669,6 +3686,7 @@ def get_message_fields(field): # Designate an appropriate value for the returned response. return_value = regionalinventory.RegionalInventory( name="name_value", + base64_encoded_name="base64_encoded_name_value", account=749, region="region_value", ) @@ -3688,6 +3706,7 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, regionalinventory.RegionalInventory) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.account == 749 assert response.region == "region_value" diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/async_client.py index 6d98adc8d4e0..46757de6eccc 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/async_client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/async_client.py @@ -703,10 +703,10 @@ async def sample_delete_product_input(): - Example: To represent the product ID ``en~US~sku/123``, the ``{productInput}`` segment - must be the base64url encoding of this string, - which is ``ZW5-VVMtc2t1LzEyMw``. The full resource - name for the product would be - ``accounts/123/productInputs/ZW5-VVMtc2t1LzEyMw``. 
+ must be the unpadded base64url encoding of this + string, which is ``ZW5-VVN-c2t1LzEyMw``. The full + resource name for the product would be + ``accounts/123/productInputs/ZW5-VVN-c2t1LzEyMw``. 2. **Plain Format**: The ``{productInput}`` segment is the tilde-separated string @@ -722,11 +722,6 @@ async def sample_delete_product_input(): (``~``) characters in the ``{productInput}`` segment is used to differentiate between the two formats. - Note: For calls to the v1beta version, the plain format - is ``channel~content_language~feed_label~offer_id``, for - example: - ``accounts/123/productinputs/online~en~US~sku123``. - This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/client.py index 48f18f21d724..5b593b45f02f 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/product_inputs_service/client.py @@ -1141,10 +1141,10 @@ def sample_delete_product_input(): - Example: To represent the product ID ``en~US~sku/123``, the ``{productInput}`` segment - must be the base64url encoding of this string, - which is ``ZW5-VVMtc2t1LzEyMw``. The full resource - name for the product would be - ``accounts/123/productInputs/ZW5-VVMtc2t1LzEyMw``. + must be the unpadded base64url encoding of this + string, which is ``ZW5-VVN-c2t1LzEyMw``. The full + resource name for the product would be + ``accounts/123/productInputs/ZW5-VVN-c2t1LzEyMw``. 2. 
**Plain Format**: The ``{productInput}`` segment is the tilde-separated string @@ -1160,11 +1160,6 @@ def sample_delete_product_input(): (``~``) characters in the ``{productInput}`` segment is used to differentiate between the two formats. - Note: For calls to the v1beta version, the plain format - is ``channel~content_language~feed_label~offer_id``, for - example: - ``accounts/123/productinputs/online~en~US~sku123``. - This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/async_client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/async_client.py index e1f9bd680dc4..76d494c302f2 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/async_client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/async_client.py @@ -369,10 +369,10 @@ async def sample_get_product(): - Example: To represent the product ID ``en~US~sku/123``, the ``{product}`` segment must - be the base64url encoding of this string, which is - ``ZW5-VVMtc2t1LzEyMw``. The full resource name for - the product would be - ``accounts/123/products/ZW5-VVMtc2t1LzEyMw``. + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. 2. 
**Plain Format**: The ``{product}`` segment is the tilde-separated string diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/client.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/client.py index 07bd1ce8e9b6..7efde60db944 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/client.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/services/products_service/client.py @@ -798,10 +798,10 @@ def sample_get_product(): - Example: To represent the product ID ``en~US~sku/123``, the ``{product}`` segment must - be the base64url encoding of this string, which is - ``ZW5-VVMtc2t1LzEyMw``. The full resource name for - the product would be - ``accounts/123/products/ZW5-VVMtc2t1LzEyMw``. + be the unpadded base64url encoding of this string, + which is ``ZW5-VVN-c2t1LzEyMw``. The full resource + name for the product would be + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. 2. **Plain Format**: The ``{product}`` segment is the tilde-separated string diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/productinputs.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/productinputs.py index 4bf49cd82d83..551fb0d6501e 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/productinputs.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/productinputs.py @@ -94,11 +94,11 @@ class ProductInput(proto.Message): as ``/``, ``%``, or ``~``. - Example: To represent the product ID ``en~US~sku/123``, - the ``{productinput}`` segment must be the base64url - encoding of this string, which is - ``ZW5-VVMtc2t1LzEyMw``. 
The full resource name for the + the ``{productinput}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the product would be - ``accounts/123/productinputs/ZW5-VVMtc2t1LzEyMw``. + ``accounts/123/productInputs/ZW5-VVN-c2t1LzEyMw``. 2. **Plain Format**: The ``{productinput}`` segment is the tilde-separated string @@ -112,13 +112,35 @@ class ProductInput(proto.Message): special characters. The presence of tilde (``~``) characters in the ``{productinput}`` segment is used to differentiate between the two formats. - - Note: For calls to the v1beta version, the plain format is - ``channel~content_language~feed_label~offer_id``, for - example: ``accounts/123/productinputs/online~en~US~sku123``. + base64_encoded_name (str): + Output only. The **unpadded base64url encoded name** of the + product input. Format: + ``accounts/{account}/productInputs/{productinput}`` where + the last section ``productinput`` is the unpadded base64url + encoding of the ``content_language~feed_label~offer_id`` + name. Example: + ``accounts/123/productInputs/ZW5-VVN-c2t1LzEyMw`` for the + decoded product input name + ``accounts/123/productInputs/en~US~sku/123``. This field can + be used directly as input to the API methods that require + the product input name to be encoded if it contains special + characters, for example + ```GetProductInput`` `__. product (str): Output only. The name of the processed product. Format: ``accounts/{account}/products/{product}`` + base64_encoded_product (str): + Output only. The **unpadded base64url encoded name** of the + processed product. Format: + ``accounts/{account}/products/{product}`` where the last + section ``product`` is the unpadded base64url encoding of + the ``content_language~feed_label~offer_id`` name. Example: + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw`` for the decoded + product name ``accounts/123/products/en~US~sku/123``. 
This + field can be used directly as input to the API methods that + require the product name to be encoded if it contains + special characters, for example + ```GetProduct`` `__. legacy_local (bool): Immutable. Determines whether the product is **only** targeting local destinations and whether the product name @@ -186,10 +208,18 @@ class ProductInput(proto.Message): proto.STRING, number=1, ) + base64_encoded_name: str = proto.Field( + proto.STRING, + number=12, + ) product: str = proto.Field( proto.STRING, number=2, ) + base64_encoded_product: str = proto.Field( + proto.STRING, + number=13, + ) legacy_local: bool = proto.Field( proto.BOOL, number=10, @@ -347,11 +377,11 @@ class DeleteProductInputRequest(proto.Message): as ``/``, ``%``, or ``~``. - Example: To represent the product ID ``en~US~sku/123``, - the ``{productInput}`` segment must be the base64url - encoding of this string, which is - ``ZW5-VVMtc2t1LzEyMw``. The full resource name for the + the ``{productInput}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. The full resource name for the product would be - ``accounts/123/productInputs/ZW5-VVMtc2t1LzEyMw``. + ``accounts/123/productInputs/ZW5-VVN-c2t1LzEyMw``. 2. **Plain Format**: The ``{productInput}`` segment is the tilde-separated string @@ -365,10 +395,6 @@ class DeleteProductInputRequest(proto.Message): special characters. The presence of tilde (``~``) characters in the ``{productInput}`` segment is used to differentiate between the two formats. - - Note: For calls to the v1beta version, the plain format is - ``channel~content_language~feed_label~offer_id``, for - example: ``accounts/123/productinputs/online~en~US~sku123``. data_source (str): Required. The primary or supplemental data source from which the product input should be deleted. 
Format: diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products.py index 253c263348ec..974e462706d5 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products.py @@ -64,6 +64,18 @@ class Product(proto.Message): consists of: ``channel~content_language~feed_label~offer_id``, for example: ``accounts/123/products/online~en~US~sku123``. + base64_encoded_name (str): + Output only. The **unpadded base64url encoded name** of the + product. Format: ``accounts/{account}/products/{product}`` + where the last section ``product`` is the unpadded base64url + encoding of the ``content_language~feed_label~offer_id`` + name. Example: ``accounts/123/products/ZW5-VVN-c2t1LzEyMw`` + for the decoded product name + ``accounts/123/products/en~US~sku/123``. This field can be + used directly as input to the API methods that require the + product name to be encoded if it contains special + characters, for example + ```GetProduct`` `__. legacy_local (bool): Output only. Determines whether the product is **only** targeting local destinations and whether the product name @@ -131,6 +143,10 @@ class Product(proto.Message): proto.STRING, number=1, ) + base64_encoded_name: str = proto.Field( + proto.STRING, + number=15, + ) legacy_local: bool = proto.Field( proto.BOOL, number=11, @@ -207,11 +223,11 @@ class GetProductRequest(proto.Message): as ``/``, ``%``, or ``~``. - Example: To represent the product ID ``en~US~sku/123``, - the ``{product}`` segment must be the base64url - encoding of this string, which is - ``ZW5-VVMtc2t1LzEyMw``. The full resource name for the + the ``{product}`` segment must be the unpadded + base64url encoding of this string, which is + ``ZW5-VVN-c2t1LzEyMw``. 
The full resource name for the product would be - ``accounts/123/products/ZW5-VVMtc2t1LzEyMw``. + ``accounts/123/products/ZW5-VVN-c2t1LzEyMw``. 2. **Plain Format**: The ``{product}`` segment is the tilde-separated string diff --git a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py index e01273d99396..a2b36a0bae1f 100644 --- a/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py +++ b/packages/google-shopping-merchant-products/google/shopping/merchant_products_v1/types/products_common.py @@ -749,7 +749,7 @@ class ProductAttributes(proto.Message): This field is a member of `oneof`_ ``_google_product_category``. gtins (MutableSequence[str]): Global Trade Item Numbers - (`GTIN `__) + (`GTIN `__) of the item. You can provide up to 10 GTINs. item_group_id (str): Shared identifier for all variants of the @@ -765,7 +765,7 @@ class ProductAttributes(proto.Message): This field is a member of `oneof`_ ``_material``. mpn (str): Manufacturer Part Number - (`MPN `__) + (`MPN `__) of the item. This field is a member of `oneof`_ ``_mpn``. @@ -862,10 +862,19 @@ class ProductAttributes(proto.Message): handling_cutoff_times (MutableSequence[google.shopping.merchant_products_v1.types.HandlingCutoffTime]): The handling cutoff times for shipping. shipping_label (str): - The shipping label of the product, used to - group product in account-level shipping rules. + The shipping label of the product, used to group products in + account-level shipping rules. Max. 100 characters. For more + information, see `Shipping + label `__. This field is a member of `oneof`_ ``_shipping_label``. + return_policy_label (str): + The return label of the product, used to group products in + account-level return policies. Max. 100 characters. For more + information, see `Return policy + label `__. 
+ + This field is a member of `oneof`_ ``_return_policy_label``. transit_time_label (str): The transit time label of the product, used to group product in account-level transit time @@ -1107,6 +1116,12 @@ class `__ sustainability_incentives (MutableSequence[google.shopping.merchant_products_v1.types.ProductSustainabilityIncentive]): The list of sustainability incentive programs. + video_links (MutableSequence[str]): + Optional. A list of video URLs for the item. Use this + attribute to provide more visuals for your product beyond + your image attributes. See the `Help Center + article `__ + for more information. """ class CarrierPriceOption(proto.Enum): @@ -1826,6 +1841,11 @@ class CarrierShipping(proto.Message): number=46, optional=True, ) + return_policy_label: str = proto.Field( + proto.STRING, + number=170, + optional=True, + ) transit_time_label: str = proto.Field( proto.STRING, number=47, @@ -2057,6 +2077,10 @@ class CarrierShipping(proto.Message): message="ProductSustainabilityIncentive", ) ) + video_links: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=169, + ) class ShippingWeight(proto.Message): @@ -2195,6 +2219,10 @@ class ProductInstallment(proto.Message): Type of installment payments. This field is a member of `oneof`_ ``_credit_type``. + annual_percentage_rate (float): + Optional. Annual percentage rate for ``credit_type`` finance + + This field is a member of `oneof`_ ``_annual_percentage_rate``. 
""" months: int = proto.Field( @@ -2218,6 +2246,11 @@ class ProductInstallment(proto.Message): optional=True, enum="CreditType", ) + annual_percentage_rate: float = proto.Field( + proto.DOUBLE, + number=5, + optional=True, + ) class LoyaltyPoints(proto.Message): diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py index ba0b0cd49c0d..ab72f81f17e3 100644 --- a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_product_inputs_service.py @@ -1400,7 +1400,9 @@ def test_insert_product_input(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1418,7 +1420,9 @@ def test_insert_product_input(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -1561,7 +1565,9 @@ async def test_insert_product_input_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1580,7 +1586,9 @@ async def test_insert_product_input_async( # Establish that the response is the type that we expect. assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -1682,7 +1690,9 @@ def test_update_product_input(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. call.return_value = productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1700,7 +1710,9 @@ def test_update_product_input(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. 
assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -1841,7 +1853,9 @@ async def test_update_product_input_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1860,7 +1874,9 @@ async def test_update_product_input_async( # Establish that the response is the type that we expect. assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -3136,7 +3152,9 @@ async def test_insert_product_input_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -3171,7 +3189,9 @@ async def test_update_product_input_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( productinputs.ProductInput( name="name_value", + 
base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -3263,7 +3283,9 @@ def test_insert_product_input_rest_call_success(request_type): request_init = {"parent": "accounts/sample1"} request_init["product_input"] = { "name": "name_value", + "base64_encoded_name": "base64_encoded_name_value", "product": "product_value", + "base64_encoded_product": "base64_encoded_product_value", "legacy_local": True, "offer_id": "offer_id_value", "content_language": "content_language_value", @@ -3305,6 +3327,7 @@ def test_insert_product_input_rest_call_success(request_type): "amount": {}, "downpayment": {}, "credit_type": 1, + "annual_percentage_rate": 0.2311, }, "subscription_cost": {"period": 1, "period_length": 1380, "amount": {}}, "loyalty_points": { @@ -3387,6 +3410,7 @@ def test_insert_product_input_rest_call_success(request_type): } ], "shipping_label": "shipping_label_value", + "return_policy_label": "return_policy_label_value", "transit_time_label": "transit_time_label_value", "size": "size_value", "size_system": 1, @@ -3472,6 +3496,7 @@ def test_insert_product_input_rest_call_success(request_type): "sustainability_incentives": [ {"amount": {}, "percentage": 0.10540000000000001, "type_": 1} ], + "video_links": ["video_links_value1", "video_links_value2"], }, "custom_attributes": [ {"name": "name_value", "value": "value_value", "group_values": {}} @@ -3551,7 +3576,9 @@ def get_message_fields(field): # Designate an appropriate value for the returned response. 
return_value = productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -3574,7 +3601,9 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -3689,7 +3718,9 @@ def test_update_product_input_rest_call_success(request_type): request_init = {"product_input": {"name": "accounts/sample1/productInputs/sample2"}} request_init["product_input"] = { "name": "accounts/sample1/productInputs/sample2", + "base64_encoded_name": "base64_encoded_name_value", "product": "product_value", + "base64_encoded_product": "base64_encoded_product_value", "legacy_local": True, "offer_id": "offer_id_value", "content_language": "content_language_value", @@ -3731,6 +3762,7 @@ def test_update_product_input_rest_call_success(request_type): "amount": {}, "downpayment": {}, "credit_type": 1, + "annual_percentage_rate": 0.2311, }, "subscription_cost": {"period": 1, "period_length": 1380, "amount": {}}, "loyalty_points": { @@ -3813,6 +3845,7 @@ def test_update_product_input_rest_call_success(request_type): } ], "shipping_label": "shipping_label_value", + "return_policy_label": "return_policy_label_value", "transit_time_label": "transit_time_label_value", "size": "size_value", "size_system": 1, @@ -3898,6 +3931,7 @@ def test_update_product_input_rest_call_success(request_type): "sustainability_incentives": [ {"amount": {}, 
"percentage": 0.10540000000000001, "type_": 1} ], + "video_links": ["video_links_value1", "video_links_value2"], }, "custom_attributes": [ {"name": "name_value", "value": "value_value", "group_values": {}} @@ -3977,7 +4011,9 @@ def get_message_fields(field): # Designate an appropriate value for the returned response. return_value = productinputs.ProductInput( name="name_value", + base64_encoded_name="base64_encoded_name_value", product="product_value", + base64_encoded_product="base64_encoded_product_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -4000,7 +4036,9 @@ def get_message_fields(field): # Establish that the response is the type that we expect. assert isinstance(response, productinputs.ProductInput) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.product == "product_value" + assert response.base64_encoded_product == "base64_encoded_product_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" diff --git a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_products_service.py b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_products_service.py index 0d1126b5b3fe..d283ce54ce9b 100644 --- a/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_products_service.py +++ b/packages/google-shopping-merchant-products/tests/unit/gapic/merchant_products_v1/test_products_service.py @@ -1360,6 +1360,7 @@ def test_get_product(request_type, transport: str = "grpc"): # Designate an appropriate return value for the call. 
call.return_value = products.Product( name="name_value", + base64_encoded_name="base64_encoded_name_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1378,6 +1379,7 @@ def test_get_product(request_type, transport: str = "grpc"): # Establish that the response is the type that we expect. assert isinstance(response, products.Product) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -1510,6 +1512,7 @@ async def test_get_product_async( call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( products.Product( name="name_value", + base64_encoded_name="base64_encoded_name_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -1529,6 +1532,7 @@ async def test_get_product_async( # Establish that the response is the type that we expect. assert isinstance(response, products.Product) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value" @@ -2803,6 +2807,7 @@ async def test_get_product_empty_call_grpc_asyncio(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( products.Product( name="name_value", + base64_encoded_name="base64_encoded_name_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -2900,6 +2905,7 @@ def test_get_product_rest_call_success(request_type): # Designate an appropriate value for the returned response. 
return_value = products.Product( name="name_value", + base64_encoded_name="base64_encoded_name_value", legacy_local=True, offer_id="offer_id_value", content_language="content_language_value", @@ -2923,6 +2929,7 @@ def test_get_product_rest_call_success(request_type): # Establish that the response is the type that we expect. assert isinstance(response, products.Product) assert response.name == "name_value" + assert response.base64_encoded_name == "base64_encoded_name_value" assert response.legacy_local is True assert response.offer_id == "offer_id_value" assert response.content_language == "content_language_value"