diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 index b055b9ca31..8d6fa50f2b 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/_shared_macros.j2 @@ -227,7 +227,8 @@ def _get_http_options(): request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. #} - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip {% else %} raise core_exceptions.from_http_response(response) {% endif %}{# is_async #} diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 index 2b85bd9e76..0a86a8df07 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/client.py.j2 @@ -131,7 +131,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) diff --git a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 index a55ced7c08..8648f82901 100644 --- a/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/ads-templates/%namespace/%name/%version/%sub/services/%service/transports/rest.py.j2 @@ -273,7 +273,8 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {{method.output.ident}}]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore # fmt: skip {% endfor %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 index 9eb968dd01..7d3e1bda33 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/__init__.py.j2 @@ -10,9 +10,11 @@ import sys __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -46,7 +48,8 @@ from .types.{{ proto.module_name }} import {{ enum.name }} {% endfor %} {% endfor %} -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip {# TODO(api_core): remove `type:ignore` below when minimum version of api_core makes the else clause unnecessary. #} api_core.check_python_version("{{package_path}}") # type: ignore api_core.check_dependency_versions("{{package_path}}") # type: ignore diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 index a4d13c34b9..5a6b09e6bc 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/_shared_macros.j2 @@ -104,7 +104,8 @@ except ImportError: # pragma: NO COVER `add_google_api_core_version_header_import` to add the necessary import statements. 
#} {% if service_version %} - if HAS_GOOGLE_API_CORE_VERSION_HEADER: # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if HAS_GOOGLE_API_CORE_VERSION_HEADER: # pragma: NO COVER # fmt: skip metadata = tuple(metadata) + ( version_header.to_api_version_header("{{ service_version }}"), ) @@ -215,7 +216,8 @@ def _get_http_options(): # Jsonify the query params query_params = _Base{{ service_name }}RestTransport._Base{{method_name}}._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -257,7 +259,8 @@ def _get_http_options(): request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once version check is added for google-api-core. #} - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip {% else %} raise core_exceptions.from_http_response(response) {% endif %}{# is_async #} @@ -355,7 +358,8 @@ See related issue: https://github.com/googleapis/gapic-generator-python/issues/2 {% macro wrap_async_method_macro() %} def _wrap_method(self, func, *args, **kwargs): {# TODO: Remove `pragma: NO COVER` once https://github.com/googleapis/python-api-core/pull/688 is merged. 
#} - if self._wrap_with_kind: # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) {% endmacro %} @@ -489,7 +493,8 @@ class {{ async_method_name_prefix }}{{ service.name }}RestInterceptor: @property def {{ name|snake_case }}(self): - return self.{{ name|make_private }}(self._session, self._host, self._interceptor) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return self.{{ name|make_private }}(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}}, {{ async_method_name_prefix }}{{service.name}}RestStub): def __hash__(self): @@ -535,7 +540,8 @@ class {{ name|make_private }}(_Base{{ service.name }}RestTransport._Base{{name}} resp = {{ sig.response_type }}() resp = json_format.Parse(content, resp) resp = {{ await_prefix }}self._interceptor.post_{{ name|snake_case }}(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 index 689d3e0620..eeeabc3677 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/async_client.py.j2 @@ -98,7 +98,8 @@ class {{ service.async_client_name }}: Returns: {{ service.async_client_name }}: The constructed client. """ - return {{ service.client_name }}.from_service_account_info.__func__({{ service.async_client_name }}, info, *args, **kwargs) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return {{ service.client_name }}.from_service_account_info.__func__({{ service.async_client_name }}, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -114,7 +115,8 @@ class {{ service.async_client_name }}: Returns: {{ service.async_client_name }}: The constructed client. """ - return {{ service.client_name }}.from_service_account_file.__func__({{ service.async_client_name }}, filename, *args, **kwargs) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return {{ service.client_name }}.from_service_account_file.__func__({{ service.async_client_name }}, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -150,7 +152,8 @@ class {{ service.async_client_name }}: Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return {{ service.client_name }}.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return {{ service.client_name }}.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> {{ service.name }}Transport: @@ -252,7 +255,8 @@ class {{ service.async_client_name }}: ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + {# Add ` # fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.async_client_name }}`.", extra = { @@ -727,7 +731,8 @@ class {{ service.async_client_name }}: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 index 0d60f47bb6..8795984c8b 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/client.py.j2 @@ -106,7 +106,8 @@ class {{ service.client_name }}Meta(type): _transport_registry["rest"] = {{ service.name }}RestTransport {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} - if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip _transport_registry["rest_asyncio"] = Async{{ service.name }}RestTransport {% endif %}{# if rest_async_io_enabled #} {% endif %} @@ -127,7 +128,8 @@ class {{ service.client_name }}Meta(type): {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2121): Remove this condition when async rest is GA. #} {% if rest_async_io_enabled %} {# NOTE: `pragma: NO COVER` is needed since the coverage for presubmits isn't combined. #} - if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + {# Add ` # fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip raise ASYNC_REST_EXCEPTION {% endif %} if label: @@ -175,7 +177,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = {% if service.host %}"{{ service.host }}"{% else %}None{% endif %} - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -197,7 +200,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -568,7 +572,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + {# Add ` # fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. client_logging.initialize_logging() @@ -660,7 +665,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `{{ service.meta.address.proto_package_versioned }}.{{ service.client_name }}`.", extra = { @@ -1014,7 +1020,8 @@ class {{ service.client_name }}(metaclass={{ service.client_name }}Meta): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +{# Add ` # fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 index 9745b08d78..f258c0874e 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/__init__.py.j2 @@ -39,7 +39,8 @@ _transport_registry['grpc_asyncio'] = {{ service.name }}GrpcAsyncIOTransport {% if 'rest' in opts.transport %} _transport_registry['rest'] = {{ service.name }}RestTransport {% if rest_async_io_enabled %} -if HAS_REST_ASYNC: # pragma: NO COVER +{# Add ` # fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if HAS_REST_ASYNC: # pragma: NO COVER # fmt: skip _transport_registry['rest_asyncio'] = Async{{ service.name }}RestTransport {% endif %}{# if rest_async_io_enabled #} {% endif %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 index 08c02448b6..d517f09e13 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/base.py.j2 @@ -52,7 +52,8 @@ from {{ (api.naming.module_namespace + (api.naming.versioned_module_name,) + ser DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +{# Add `# fmt: skip` to keep 
`# pragma: NO COVER` on the same line #} +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 index d5f692442f..7d05965fc1 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/grpc_asyncio.py.j2 @@ -68,7 +68,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pra async def intercept_unary_unary(self, continuation, client_call_details, request): {{ shared_macros.unary_request_interceptor_common(service) }} response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 index 0ab91b2974..8c56fbf369 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest.py.j2 @@ -56,7 +56,8 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -263,7 +264,8 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): resp = self._interceptor.post_{{ method.name|snake_case }}(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2389): #} {# Depending how we want to log (a) receiving a streaming response vs (b) exposing the next streamed item to the user, we could possibly want to log something here #} {# (a) should always happen in api-core #} @@ -311,7 +313,8 @@ class {{service.name}}RestTransport(_Base{{ service.name }}RestTransport): {{method.output.ident}}]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here.
# In C++ this would require a dynamic_cast - return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore # fmt: skip {% endfor %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 index 10118892d4..1f46ef7560 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_asyncio.py.j2 @@ -80,7 +80,8 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( rest_version=f"google-auth@{google.auth.__version__}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +{# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ {{ shared_macros.create_interceptor_class(api, service, method, is_async=True) }} @@ -222,7 +223,8 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): resp = await self._interceptor.post_{{ method.name|snake_case }}(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_{{ method.name|snake_case }}_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2389): #} {# Depending how we want to log (a) receiving a streaming response vs (b) exposing the next streamed item to the user, we could possibly want to log something here #} {# (a) should always happen in api-core #} @@ -318,7 +320,8 @@ class Async{{service.name}}RestTransport(_Base{{ service.name }}RestTransport): def {{method.transport_safe_name|snake_case}}(self) -> Callable[ [{{method.input.ident}}], {{method.output.ident}}]: - return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + return self._{{method.name}}(self._session, self._host, self._interceptor) # type: ignore # fmt: skip {% endfor %} {% for name, sig in api.mixin_api_signatures.items() %} diff --git a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 index b79785afc5..a14c1508e3 100644 --- a/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 +++ b/gapic/templates/%namespace/%name_%version/%sub/services/%service/transports/rest_base.py.j2 @@ -89,7 +89,8 @@ class _Base{{ service.name }}RestTransport({{service.name}}Transport): # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + raise 
ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 index c0e92cd9d6..1bcf030065 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_%service.py.j2 @@ -112,7 +112,8 @@ CRED_INFO_STRING = json.dumps(CRED_INFO_JSON) async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 index f15326d670..bb2b036070 100644 --- a/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 +++ b/gapic/templates/tests/unit/gapic/%name_%version/%sub/test_macros.j2 @@ -1635,7 +1635,8 @@ def test_unsupported_parameter_rest_asyncio(): pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") options = client_options.ClientOptions(quota_project_id="octopus") {# TODO(https://github.com/googleapis/gapic-generator-python/issues/2137): Remove `type: ignore` once we add a version check for google-api-core. #} - with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + {# Add `# fmt: skip` to keep `# type: ignore` on the same line #} + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore # fmt: skip client = {{ get_client(service, True) }}( credentials={{get_credentials(True)}}, transport="rest_asyncio", @@ -1918,7 +1919,8 @@ def test_initialize_client_w_{{transport_name}}(): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -1932,7 +1934,8 @@ def test_initialize_client_w_{{transport_name}}(): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + for field, value in request_init["{{ field.name }}"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -1952,7 +1955,8 @@ def test_initialize_client_w_{{transport_name}}(): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + {# Add `# fmt: skip` to keep `# pragma: NO COVER` on the same line #} + for subfield_to_delete 
in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py index 31068ac472..faa93ecf69 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -109,10 +109,10 @@ from .types.assets import TimeWindow from .types.assets import VersionedResource -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.asset_v1") # type: ignore api_core.check_dependency_versions("google.cloud.asset_v1") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py index 6ae7827124..942b4c0f4d 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/async_client.py @@ -106,7 +106,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: AssetServiceAsyncClient: The constructed client. """ - return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs) # type: ignore + return AssetServiceClient.from_service_account_info.__func__(AssetServiceAsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -122,7 +122,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: AssetServiceAsyncClient: The constructed client. """ - return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs) # type: ignore + return AssetServiceClient.from_service_account_file.__func__(AssetServiceAsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -158,7 +158,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return AssetServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> AssetServiceTransport: @@ -256,7 +256,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceAsyncClient`.", extra = { @@ -3212,7 +3212,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py index 79fb1058cb..c3dad2f425 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/client.py @@ -132,7 +132,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "cloudasset.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -153,7 +153,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -624,7 +624,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -681,7 +681,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.asset_v1.AssetServiceClient`.", extra = { @@ -3640,7 +3640,7 @@ def get_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py index dedcdb2f0b..5a5c69dc2a 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/base.py @@ -34,7 +34,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py index daa6a60e7e..4d5beb9529 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc.py @@ -48,7 +48,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -76,7 +76,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py index ef1bd83e28..a6f754add0 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class 
_LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -80,7 +80,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -1213,7 +1213,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py index 4721eb89e1..2326d7f0ab 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest.py @@ -61,7 +61,7 @@ rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -1237,7 +1237,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicy._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1277,7 +1277,7 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_iam_policy_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.AnalyzeIamPolicyResponse.to_json(response) except: @@ -1364,7 +1364,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeIamPolicyLongrunning._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: 
@@ -1402,7 +1402,7 @@ def __call__(self, resp = self._interceptor.post_analyze_iam_policy_longrunning(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_iam_policy_longrunning_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1484,7 +1484,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeMove._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1524,7 +1524,7 @@ def __call__(self, resp = self._interceptor.post_analyze_move(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_move_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.AnalyzeMoveResponse.to_json(response) except: @@ -1606,7 +1606,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicies._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1646,7 +1646,7 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_org_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.AnalyzeOrgPoliciesResponse.to_json(response) except: @@ -1729,7 +1729,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedAssets._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1769,7 +1769,7 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_org_policy_governed_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.AnalyzeOrgPolicyGovernedAssetsResponse.to_json(response) except: @@ -1852,7 +1852,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseAnalyzeOrgPolicyGovernedContainers._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1892,7 +1892,7 @@ def __call__(self, resp = self._interceptor.post_analyze_org_policy_governed_containers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_analyze_org_policy_governed_containers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.AnalyzeOrgPolicyGovernedContainersResponse.to_json(response) except: @@ -1971,7 +1971,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetAssetsHistory._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2011,7 +2011,7 @@ def __call__(self, resp = self._interceptor.post_batch_get_assets_history(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_batch_get_assets_history_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.BatchGetAssetsHistoryResponse.to_json(response) except: @@ -2094,7 +2094,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseBatchGetEffectiveIamPolicies._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2134,7 +2134,7 @@ def __call__(self, resp = self._interceptor.post_batch_get_effective_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_batch_get_effective_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.BatchGetEffectiveIamPoliciesResponse.to_json(response) except: @@ -2224,7 +2224,7 @@ def __call__(self, # Jsonify the query params query_params = 
_BaseAssetServiceRestTransport._BaseCreateFeed._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2264,7 +2264,7 @@ def __call__(self, resp = self._interceptor.post_create_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.Feed.to_json(response) except: @@ -2348,7 +2348,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseCreateSavedQuery._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2388,7 +2388,7 @@ def __call__(self, resp = self._interceptor.post_create_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.SavedQuery.to_json(response) except: @@ -2463,7 +2463,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteFeed._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2548,7 +2548,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseDeleteSavedQuery._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2643,7 +2643,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseExportAssets._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2681,7 +2681,7 @@ def __call__(self, resp = self._interceptor.post_export_assets(resp) response_metadata = [(k, str(v)) for k, v in 
response.headers.items()] resp, _ = self._interceptor.post_export_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2768,7 +2768,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetFeed._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2808,7 +2808,7 @@ def __call__(self, resp = self._interceptor.post_get_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.Feed.to_json(response) except: @@ -2889,7 +2889,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetSavedQuery._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2929,7 +2929,7 @@ def __call__(self, resp = self._interceptor.post_get_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.SavedQuery.to_json(response) except: @@ -3008,7 +3008,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListAssets._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3048,7 +3048,7 @@ def __call__(self, resp = self._interceptor.post_list_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.ListAssetsResponse.to_json(response) except: @@ -3127,7 +3127,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListFeeds._get_query_params_json(transcoded_request) - if 
CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3167,7 +3167,7 @@ def __call__(self, resp = self._interceptor.post_list_feeds(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_feeds_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.ListFeedsResponse.to_json(response) except: @@ -3246,7 +3246,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseListSavedQueries._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3286,7 +3286,7 @@ def __call__(self, resp = self._interceptor.post_list_saved_queries(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_saved_queries_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.ListSavedQueriesResponse.to_json(response) except: @@ -3368,7 +3368,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseQueryAssets._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3408,7 +3408,7 @@ def __call__(self, resp = self._interceptor.post_query_assets(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_query_assets_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.QueryAssetsResponse.to_json(response) except: @@ -3487,7 +3487,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllIamPolicies._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3527,7 +3527,7 @@ def __call__(self, resp = self._interceptor.post_search_all_iam_policies(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] 
resp, _ = self._interceptor.post_search_all_iam_policies_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.SearchAllIamPoliciesResponse.to_json(response) except: @@ -3606,7 +3606,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseSearchAllResources._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3646,7 +3646,7 @@ def __call__(self, resp = self._interceptor.post_search_all_resources(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_search_all_resources_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.SearchAllResourcesResponse.to_json(response) except: @@ -3736,7 +3736,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateFeed._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3776,7 +3776,7 @@ def __call__(self, resp = self._interceptor.post_update_feed(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_feed_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.Feed.to_json(response) except: @@ -3860,7 +3860,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseUpdateSavedQuery._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3900,7 +3900,7 @@ def __call__(self, resp = self._interceptor.post_update_saved_query(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_saved_query_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = asset_service.SavedQuery.to_json(response) except: @@ -3927,7 +3927,7 @@ def analyze_iam_policy(self) -> Callable[ asset_service.AnalyzeIamPolicyResponse]: # The 
return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicy(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def analyze_iam_policy_longrunning(self) -> Callable[ @@ -3935,7 +3935,7 @@ def analyze_iam_policy_longrunning(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeIamPolicyLongrunning(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def analyze_move(self) -> Callable[ @@ -3943,7 +3943,7 @@ def analyze_move(self) -> Callable[ asset_service.AnalyzeMoveResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeMove(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def analyze_org_policies(self) -> Callable[ @@ -3951,7 +3951,7 @@ def analyze_org_policies(self) -> Callable[ asset_service.AnalyzeOrgPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicies(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def analyze_org_policy_governed_assets(self) -> Callable[ @@ -3959,7 +3959,7 @@ def analyze_org_policy_governed_assets(self) -> Callable[ asset_service.AnalyzeOrgPolicyGovernedAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedAssets(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def analyze_org_policy_governed_containers(self) -> Callable[ @@ -3967,7 +3967,7 @@ def analyze_org_policy_governed_containers(self) -> Callable[ asset_service.AnalyzeOrgPolicyGovernedContainersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore + return self._AnalyzeOrgPolicyGovernedContainers(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def batch_get_assets_history(self) -> Callable[ @@ -3975,7 +3975,7 @@ def batch_get_assets_history(self) -> Callable[ asset_service.BatchGetAssetsHistoryResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetAssetsHistory(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def batch_get_effective_iam_policies(self) -> Callable[ @@ -3983,7 +3983,7 @@ def batch_get_effective_iam_policies(self) -> Callable[ asset_service.BatchGetEffectiveIamPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._BatchGetEffectiveIamPolicies(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def create_feed(self) -> Callable[ @@ -3991,7 +3991,7 @@ def create_feed(self) -> Callable[ asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._CreateFeed(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def create_saved_query(self) -> Callable[ @@ -3999,7 +3999,7 @@ def create_saved_query(self) -> Callable[ asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._CreateSavedQuery(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_feed(self) -> Callable[ @@ -4007,7 +4007,7 @@ def delete_feed(self) -> Callable[ empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteFeed(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_saved_query(self) -> Callable[ @@ -4015,7 +4015,7 @@ def delete_saved_query(self) -> Callable[ empty_pb2.Empty]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteSavedQuery(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def export_assets(self) -> Callable[ @@ -4023,7 +4023,7 @@ def export_assets(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ExportAssets(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_feed(self) -> Callable[ @@ -4031,7 +4031,7 @@ def get_feed(self) -> Callable[ asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore + return self._GetFeed(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_saved_query(self) -> Callable[ @@ -4039,7 +4039,7 @@ def get_saved_query(self) -> Callable[ asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._GetSavedQuery(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_assets(self) -> Callable[ @@ -4047,7 +4047,7 @@ def list_assets(self) -> Callable[ asset_service.ListAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore + return self._ListAssets(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_feeds(self) -> Callable[ @@ -4055,7 +4055,7 @@ def list_feeds(self) -> Callable[ asset_service.ListFeedsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore + return self._ListFeeds(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_saved_queries(self) -> Callable[ @@ -4063,7 +4063,7 @@ def list_saved_queries(self) -> Callable[ asset_service.ListSavedQueriesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore + return self._ListSavedQueries(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def query_assets(self) -> Callable[ @@ -4071,7 +4071,7 @@ def query_assets(self) -> Callable[ asset_service.QueryAssetsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore + return self._QueryAssets(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def search_all_iam_policies(self) -> Callable[ @@ -4079,7 +4079,7 @@ def search_all_iam_policies(self) -> Callable[ asset_service.SearchAllIamPoliciesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllIamPolicies(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def search_all_resources(self) -> Callable[ @@ -4087,7 +4087,7 @@ def search_all_resources(self) -> Callable[ asset_service.SearchAllResourcesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore + return self._SearchAllResources(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_feed(self) -> Callable[ @@ -4095,7 +4095,7 @@ def update_feed(self) -> Callable[ asset_service.Feed]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateFeed(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_saved_query(self) -> Callable[ @@ -4103,11 +4103,11 @@ def update_saved_query(self) -> Callable[ asset_service.SavedQuery]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateSavedQuery(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseAssetServiceRestTransport._BaseGetOperation, AssetServiceRestStub): def __hash__(self): @@ -4167,7 +4167,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseAssetServiceRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4202,7 +4202,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py index b4c1d8818c..b582bae732 100755 --- a/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py +++ b/tests/integration/goldens/asset/google/cloud/asset_v1/services/asset_service/transports/rest_base.py @@ -73,7 +73,7 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py index fa466d554f..20d6169cf6 100755 --- 
a/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py +++ b/tests/integration/goldens/asset/tests/unit/gapic/asset_v1/test_asset_service.py @@ -81,7 +81,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") @@ -15900,7 +15900,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -15914,7 +15914,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -15934,7 +15934,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -16294,7 +16294,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -16308,7 +16308,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["saved_query"].items(): # pragma: NO COVER + for field, value in request_init["saved_query"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -16328,7 +16328,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py index 2890169a65..6590f87a46 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py +++ 
b/tests/integration/goldens/credentials/google/iam/credentials_v1/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -40,10 +40,10 @@ from .types.common import SignJwtRequest from .types.common import SignJwtResponse -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.iam.credentials_v1") # type: ignore api_core.check_dependency_versions("google.iam.credentials_v1") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py index c06d819c47..4ffda13cb5 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/async_client.py @@ -97,7 +97,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: IAMCredentialsAsyncClient: The constructed client. """ - return IAMCredentialsClient.from_service_account_info.__func__(IAMCredentialsAsyncClient, info, *args, **kwargs) # type: ignore + return IAMCredentialsClient.from_service_account_info.__func__(IAMCredentialsAsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -113,7 +113,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: IAMCredentialsAsyncClient: The constructed client. """ - return IAMCredentialsClient.from_service_account_file.__func__(IAMCredentialsAsyncClient, filename, *args, **kwargs) # type: ignore + return IAMCredentialsClient.from_service_account_file.__func__(IAMCredentialsAsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -149,7 +149,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return IAMCredentialsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return IAMCredentialsClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> IAMCredentialsTransport: @@ -247,7 +247,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.iam.credentials_v1.IAMCredentialsAsyncClient`.", extra = { @@ -858,7 +858,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py index a3a6f84e33..fa525855f1 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/client.py @@ -135,7 +135,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "iamcredentials.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -156,7 +156,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -561,7 +561,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -618,7 +618,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.iam.credentials_v1.IAMCredentialsClient`.", extra = { @@ -1238,7 +1238,7 @@ def __exit__(self, type, value, traceback): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py index 5eca520ccd..2bb5b41537 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/base.py @@ -31,7 +31,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py index 5baaeb8e5e..e9797dd045 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc.py @@ -45,7 +45,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -73,7 +73,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py index a793abd09c..cfb5b53709 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py +++ 
b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/grpc_asyncio.py @@ -49,7 +49,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -77,7 +77,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -498,7 +498,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py index c0ad2e73ea..f36d3d575b 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest.py @@ -58,7 +58,7 @@ rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -418,7 +418,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseGenerateAccessToken._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -458,7 +458,7 @@ def __call__(self, resp = self._interceptor.post_generate_access_token(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_generate_access_token_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = common.GenerateAccessTokenResponse.to_json(response) except: @@ -540,7 +540,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseGenerateIdToken._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -580,7 +580,7 @@ def __call__(self, resp = self._interceptor.post_generate_id_token(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_generate_id_token_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = common.GenerateIdTokenResponse.to_json(response) except: @@ -662,7 +662,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignBlob._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -702,7 +702,7 @@ def __call__(self, resp = self._interceptor.post_sign_blob(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_sign_blob_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = common.SignBlobResponse.to_json(response) except: @@ -784,7 +784,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseIAMCredentialsRestTransport._BaseSignJwt._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -824,7 +824,7 @@ def __call__(self, resp = self._interceptor.post_sign_jwt(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_sign_jwt_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = common.SignJwtResponse.to_json(response) except: @@ -851,7 +851,7 @@ def generate_access_token(self) -> Callable[ common.GenerateAccessTokenResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateAccessToken(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def generate_id_token(self) -> Callable[ @@ -859,7 +859,7 @@ def generate_id_token(self) -> Callable[ common.GenerateIdTokenResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore + return self._GenerateIdToken(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def sign_blob(self) -> Callable[ @@ -867,7 +867,7 @@ def sign_blob(self) -> Callable[ common.SignBlobResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore + return self._SignBlob(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def sign_jwt(self) -> Callable[ @@ -875,7 +875,7 @@ def sign_jwt(self) -> Callable[ common.SignJwtResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore + return self._SignJwt(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def kind(self) -> str: diff --git a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py index a03907410f..d1a6523ab3 100755 --- a/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py +++ b/tests/integration/goldens/credentials/google/iam/credentials_v1/services/iam_credentials/transports/rest_base.py @@ -71,7 +71,7 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py index 4f6edbe175..eb8b66e815 100755 --- a/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py +++ b/tests/integration/goldens/credentials/tests/unit/gapic/credentials_v1/test_iam_credentials.py @@ -71,7 +71,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py index 20d981ec3e..3b3b5c71d4 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -69,10 +69,10 @@ from .types.trigger 
import Transport from .types.trigger import Trigger -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.eventarc_v1") # type: ignore api_core.check_dependency_versions("google.cloud.eventarc_v1") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py index 15dacf36ca..28e5bbbb98 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/async_client.py @@ -124,7 +124,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: EventarcAsyncClient: The constructed client. """ - return EventarcClient.from_service_account_info.__func__(EventarcAsyncClient, info, *args, **kwargs) # type: ignore + return EventarcClient.from_service_account_info.__func__(EventarcAsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -140,7 +140,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: EventarcAsyncClient: The constructed client. """ - return EventarcClient.from_service_account_file.__func__(EventarcAsyncClient, filename, *args, **kwargs) # type: ignore + return EventarcClient.from_service_account_file.__func__(EventarcAsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -176,7 +176,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return EventarcClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return EventarcClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> EventarcTransport: @@ -274,7 +274,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.eventarc_v1.EventarcAsyncClient`.", extra = { @@ -3161,7 +3161,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py index b190ba2f34..19db769e53 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/client.py @@ -144,7 +144,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "eventarc.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -165,7 +165,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -669,7 +669,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -726,7 +726,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.eventarc_v1.EventarcClient`.", extra = { @@ -3631,7 +3631,7 @@ def list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py index 654a746911..dc491fbe38 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/base.py @@ -42,7 +42,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py index 887700548d..3ad31fdb95 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc.py @@ -56,7 +56,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -84,7 +84,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py index 6c940b3a82..1339542092 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/grpc_asyncio.py @@ -60,7 +60,7 @@ class 
_LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -88,7 +88,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -957,7 +957,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py index dc3ded0df6..fef0dd6ec4 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest.py @@ -69,7 +69,7 @@ rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -1282,7 +1282,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannel._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1320,7 +1320,7 @@ def __call__(self, resp = self._interceptor.post_create_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1406,7 +1406,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateChannelConnection._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1444,7 +1444,7 @@ def __call__(self, resp 
= self._interceptor.post_create_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1530,7 +1530,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCreateTrigger._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1568,7 +1568,7 @@ def __call__(self, resp = self._interceptor.post_create_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1651,7 +1651,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannel._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1689,7 +1689,7 @@ def __call__(self, resp = self._interceptor.post_delete_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_delete_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1772,7 +1772,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteChannelConnection._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1810,7 +1810,7 @@ def __call__(self, resp = self._interceptor.post_delete_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_delete_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1893,7 +1893,7 @@ def __call__(self, # Jsonify 
the query params query_params = _BaseEventarcRestTransport._BaseDeleteTrigger._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1931,7 +1931,7 @@ def __call__(self, resp = self._interceptor.post_delete_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_delete_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2019,7 +2019,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetChannel._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2059,7 +2059,7 @@ def __call__(self, resp = self._interceptor.post_get_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = channel.Channel.to_json(response) except: @@ -2146,7 +2146,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetChannelConnection._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2186,7 +2186,7 @@ def __call__(self, resp = self._interceptor.post_get_channel_connection(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_channel_connection_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = channel_connection.ChannelConnection.to_json(response) except: @@ -2274,7 +2274,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetGoogleChannelConfig._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2314,7 +2314,7 @@ def __call__(self, resp = 
self._interceptor.post_get_google_channel_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_google_channel_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = google_channel_config.GoogleChannelConfig.to_json(response) except: @@ -2396,7 +2396,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetProvider._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2436,7 +2436,7 @@ def __call__(self, resp = self._interceptor.post_get_provider(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_provider_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = discovery.Provider.to_json(response) except: @@ -2518,7 +2518,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetTrigger._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2558,7 +2558,7 @@ def __call__(self, resp = self._interceptor.post_get_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = trigger.Trigger.to_json(response) except: @@ -2640,7 +2640,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListChannelConnections._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2680,7 +2680,7 @@ def __call__(self, resp = self._interceptor.post_list_channel_connections(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_channel_connections_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = eventarc.ListChannelConnectionsResponse.to_json(response) except: @@ -2760,7 
+2760,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListChannels._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2800,7 +2800,7 @@ def __call__(self, resp = self._interceptor.post_list_channels(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_channels_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = eventarc.ListChannelsResponse.to_json(response) except: @@ -2880,7 +2880,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListProviders._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2920,7 +2920,7 @@ def __call__(self, resp = self._interceptor.post_list_providers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_providers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = eventarc.ListProvidersResponse.to_json(response) except: @@ -3000,7 +3000,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListTriggers._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3040,7 +3040,7 @@ def __call__(self, resp = self._interceptor.post_list_triggers(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_triggers_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = eventarc.ListTriggersResponse.to_json(response) except: @@ -3126,7 +3126,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateChannel._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3164,7 +3164,7 @@ def __call__(self, 
resp = self._interceptor.post_update_channel(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_channel_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3256,7 +3256,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateGoogleChannelConfig._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3296,7 +3296,7 @@ def __call__(self, resp = self._interceptor.post_update_google_channel_config(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_google_channel_config_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = gce_google_channel_config.GoogleChannelConfig.to_json(response) except: @@ -3382,7 +3382,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseUpdateTrigger._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3420,7 +3420,7 @@ def __call__(self, resp = self._interceptor.post_update_trigger(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_trigger_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3447,7 +3447,7 @@ def create_channel_(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannel(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def create_channel_connection(self) -> Callable[ @@ -3455,7 +3455,7 @@ def create_channel_connection(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
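The hunks above keep re-applying the same guard, `CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG)`, so the JSON payload is only built when it will actually be emitted. A minimal, hedged sketch of that pattern (the flag name mirrors the generated transports; `Duration` is just a stand-in for the real response message):

```python
import logging

from google.protobuf import duration_pb2, json_format

CLIENT_LOGGING_SUPPORTED = True  # assumed here; the generated code derives this from google-api-core
_LOGGER = logging.getLogger(__name__)


def log_response(resp: duration_pb2.Duration) -> None:
    # Serialize the payload only when DEBUG logging is actually enabled.
    if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER # fmt: skip
        try:
            response_payload = json_format.MessageToJson(resp)
        except Exception:
            response_payload = None
        _LOGGER.debug("received response", extra={"payload": response_payload})


# e.g. log_response(duration_pb2.Duration(seconds=3))
```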
# In C++ this would require a dynamic_cast - return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._CreateChannelConnection(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def create_trigger(self) -> Callable[ @@ -3463,7 +3463,7 @@ def create_trigger(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._CreateTrigger(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_channel(self) -> Callable[ @@ -3471,7 +3471,7 @@ def delete_channel(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannel(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_channel_connection(self) -> Callable[ @@ -3479,7 +3479,7 @@ def delete_channel_connection(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteChannelConnection(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_trigger(self) -> Callable[ @@ -3487,7 +3487,7 @@ def delete_trigger(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteTrigger(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_channel(self) -> Callable[ @@ -3495,7 +3495,7 @@ def get_channel(self) -> Callable[ channel.Channel]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannel(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_channel_connection(self) -> Callable[ @@ -3503,7 +3503,7 @@ def get_channel_connection(self) -> Callable[ channel_connection.ChannelConnection]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore + return self._GetChannelConnection(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_google_channel_config(self) -> Callable[ @@ -3511,7 +3511,7 @@ def get_google_channel_config(self) -> Callable[ google_channel_config.GoogleChannelConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._GetGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_provider(self) -> Callable[ @@ -3519,7 +3519,7 @@ def get_provider(self) -> Callable[ discovery.Provider]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore + return self._GetProvider(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_trigger(self) -> Callable[ @@ -3527,7 +3527,7 @@ def get_trigger(self) -> Callable[ trigger.Trigger]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._GetTrigger(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_channel_connections(self) -> Callable[ @@ -3535,7 +3535,7 @@ def list_channel_connections(self) -> Callable[ eventarc.ListChannelConnectionsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannelConnections(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_channels(self) -> Callable[ @@ -3543,7 +3543,7 @@ def list_channels(self) -> Callable[ eventarc.ListChannelsResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore + return self._ListChannels(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_providers(self) -> Callable[ @@ -3551,7 +3551,7 @@ def list_providers(self) -> Callable[ eventarc.ListProvidersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore + return self._ListProviders(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_triggers(self) -> Callable[ @@ -3559,7 +3559,7 @@ def list_triggers(self) -> Callable[ eventarc.ListTriggersResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore + return self._ListTriggers(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_channel(self) -> Callable[ @@ -3567,7 +3567,7 @@ def update_channel(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
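Each property above is annotated as returning a `Callable`, but actually returns a nested stub instance whose `__call__` does the work, which is why every `return` carries `# type: ignore` (and now `# fmt: skip`). A hypothetical, simplified sketch of that shape, not the generated class:

```python
from typing import Callable


class Transport:
    class _GetThing:
        """Stand-in for a generated REST stub class."""

        def __init__(self, session: object) -> None:
            self._session = session

        def __call__(self, request: str) -> str:
            return f"thing for {request!r}"

    def __init__(self) -> None:
        self._session = object()

    @property
    def get_thing(self) -> Callable[[str], str]:
        # mypy cannot prove the stub instance satisfies the declared Callable,
        # hence the trailing `# type: ignore`; `# fmt: skip` keeps it on this line.
        return self._GetThing(self._session)  # type: ignore # fmt: skip


# e.g. Transport().get_thing("channel-1")
```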
# In C++ this would require a dynamic_cast - return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateChannel(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_google_channel_config(self) -> Callable[ @@ -3575,7 +3575,7 @@ def update_google_channel_config(self) -> Callable[ gce_google_channel_config.GoogleChannelConfig]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateGoogleChannelConfig(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_trigger(self) -> Callable[ @@ -3583,11 +3583,11 @@ def update_trigger(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateTrigger(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetLocation(_BaseEventarcRestTransport._BaseGetLocation, EventarcRestStub): def __hash__(self): @@ -3647,7 +3647,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3682,7 +3682,7 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3705,7 +3705,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListLocations(_BaseEventarcRestTransport._BaseListLocations, EventarcRestStub): def __hash__(self): @@ -3765,7 +3765,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3800,7 +3800,7 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3823,7 +3823,7 @@ def __call__(self, @property def get_iam_policy(self): - return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._GetIamPolicy(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetIamPolicy(_BaseEventarcRestTransport._BaseGetIamPolicy, EventarcRestStub): def __hash__(self): @@ -3883,7 +3883,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetIamPolicy._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3918,7 +3918,7 @@ def __call__(self, resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3941,7 +3941,7 @@ def __call__(self, @property def set_iam_policy(self): - return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore + return self._SetIamPolicy(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _SetIamPolicy(_BaseEventarcRestTransport._BaseSetIamPolicy, EventarcRestStub): def __hash__(self): @@ -4004,7 +4004,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseSetIamPolicy._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4039,7 +4039,7 @@ def __call__(self, resp = policy_pb2.Policy() resp = json_format.Parse(content, resp) resp = self._interceptor.post_set_iam_policy(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -4062,7 +4062,7 @@ def __call__(self, @property def test_iam_permissions(self): - return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore + return self._TestIamPermissions(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _TestIamPermissions(_BaseEventarcRestTransport._BaseTestIamPermissions, EventarcRestStub): def __hash__(self): @@ -4125,7 +4125,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseTestIamPermissions._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip 
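The only substantive change throughout this diff is appending `# fmt: skip` wherever a long generated line already ends in `# type: ignore` or `# pragma: NO COVER`. Black honors `# fmt: skip` by leaving that physical line unformatted; without it, rewrapping a long line can strand the trailing comment on a different line, and mypy or coverage.py would then stop associating the suppression with the statement or branch it annotates. A small hedged illustration with hypothetical names:

```python
import logging

_LOGGER = logging.getLogger(__name__)
CLIENT_LOGGING_SUPPORTED = True  # assumed stand-in for the flag in the generated modules


def maybe_log_request(transcoded_request: dict, host: str) -> None:
    # Without `# fmt: skip`, a formatter may rewrap this long condition and leave the
    # trailing `# pragma: NO COVER` on a different physical line than the branch it
    # is meant to exclude from coverage measurement.
    if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER # fmt: skip
        request_url = "{host}{uri}".format(host=host, uri=transcoded_request["uri"])
        _LOGGER.debug("sending request to %s", request_url)


# e.g. maybe_log_request({"uri": "/v1/projects/p/triggers"}, "https://eventarc.googleapis.com")
```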
request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4160,7 +4160,7 @@ def __call__(self, resp = iam_policy_pb2.TestIamPermissionsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_test_iam_permissions(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -4183,7 +4183,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _CancelOperation(_BaseEventarcRestTransport._BaseCancelOperation, EventarcRestStub): def __hash__(self): @@ -4243,7 +4243,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4278,7 +4278,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _DeleteOperation(_BaseEventarcRestTransport._BaseDeleteOperation, EventarcRestStub): def __hash__(self): @@ -4335,7 +4335,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4370,7 +4370,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseEventarcRestTransport._BaseGetOperation, EventarcRestStub): def __hash__(self): @@ -4430,7 +4430,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4465,7 +4465,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: 
response_payload = json_format.MessageToJson(resp) except: @@ -4488,7 +4488,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListOperations(_BaseEventarcRestTransport._BaseListOperations, EventarcRestStub): def __hash__(self): @@ -4548,7 +4548,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseEventarcRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -4583,7 +4583,7 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py index 6ff0278064..92f967b949 100755 --- a/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py +++ b/tests/integration/goldens/eventarc/google/cloud/eventarc_v1/services/eventarc/transports/rest_base.py @@ -81,7 +81,7 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?Phttp(?:s)?://)?(?P.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py index fe8e04769b..8eaef3023d 100755 --- a/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py +++ b/tests/integration/goldens/eventarc/tests/unit/gapic/eventarc_v1/test_eventarc.py @@ -91,7 +91,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") @@ -12277,7 +12277,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12291,7 +12291,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in 
request_init["trigger"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -12311,7 +12311,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -12442,7 +12442,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12456,7 +12456,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["trigger"].items(): # pragma: NO COVER + for field, value in request_init["trigger"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -12476,7 +12476,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -12933,7 +12933,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -12947,7 +12947,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["channel"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -12967,7 +12967,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -13098,7 +13098,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: 
message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -13112,7 +13112,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel"].items(): # pragma: NO COVER + for field, value in request_init["channel"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -13132,7 +13132,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -13800,7 +13800,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -13814,7 +13814,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["channel_connection"].items(): # pragma: NO COVER + for field, value in request_init["channel_connection"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -13834,7 +13834,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -14174,7 +14174,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -14188,7 +14188,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER + for field, value in request_init["google_channel_config"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For 
repeated fields @@ -14208,7 +14208,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py index 242a438825..7a1411aae8 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -112,10 +112,10 @@ from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.logging_v2") # type: ignore api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py index 5946de63c3..fa9a56c835 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -104,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: ConfigServiceV2AsyncClient: The constructed client. """ - return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return ConfigServiceV2Client.from_service_account_info.__func__(ConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -120,7 +120,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: ConfigServiceV2AsyncClient: The constructed client. 
""" - return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return ConfigServiceV2Client.from_service_account_file.__func__(ConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -156,7 +156,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - return ConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return ConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> ConfigServiceV2Transport: @@ -251,7 +251,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.ConfigServiceV2AsyncClient`.", extra = { @@ -4059,7 +4059,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py index 1dc177afa6..2f46b57ae4 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/client.py @@ -128,7 +128,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -149,7 +149,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -617,7 +617,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -674,7 +674,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.ConfigServiceV2Client`.", extra = { @@ -4485,7 +4485,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py index a54da26717..3afff3da52 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -34,7 +34,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index c178350462..106368aa2b 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -48,7 +48,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -76,7 +76,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 85199473c0..e09efb7774 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ 
b/tests/integration/goldens/logging/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -80,7 +80,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -1526,7 +1526,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 545946ee11..cec9d01d83 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -89,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: LoggingServiceV2AsyncClient: The constructed client. """ - return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -105,7 +105,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: LoggingServiceV2AsyncClient: The constructed client. """ - return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -141,7 +141,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> LoggingServiceV2Transport: @@ -236,7 +236,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", extra = { @@ -1174,7 +1174,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py index 7080ae8c84..6ff759d61c 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -125,7 +125,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -146,7 +146,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -548,7 +548,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -605,7 +605,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", extra = { @@ -1559,7 +1559,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index d3eb7d5bd5..e0614a2c09 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -33,7 +33,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 7a0f637c92..974581ded0 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -47,7 +47,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -75,7 +75,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index da2030c510..bfe64ecf8f 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ 
b/tests/integration/goldens/logging/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -79,7 +79,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -609,7 +609,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index e3fdd29780..bfb110b85f 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -90,7 +90,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: MetricsServiceV2AsyncClient: The constructed client. """ - return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return MetricsServiceV2Client.from_service_account_info.__func__(MetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -106,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: MetricsServiceV2AsyncClient: The constructed client. """ - return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return MetricsServiceV2Client.from_service_account_file.__func__(MetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -142,7 +142,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> MetricsServiceV2Transport: @@ -237,7 +237,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.MetricsServiceV2AsyncClient`.", extra = { @@ -1025,7 +1025,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py index e17a68536b..dd1424ba6d 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -126,7 +126,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -147,7 +147,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -549,7 +549,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -606,7 +606,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.MetricsServiceV2Client`.", extra = { @@ -1410,7 +1410,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 6c9441cc02..1d6919d3ab 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -33,7 +33,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0a6c2a5282..7738a7c75e 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -47,7 +47,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -75,7 +75,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 7f88d30b76..ec1bdf632f 100755 --- a/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ 
b/tests/integration/goldens/logging/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -79,7 +79,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -537,7 +537,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py index b614a42929..ba75d0aba9 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -72,7 +72,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 65e5ea8ddc..c35d1b7d59 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -73,7 +73,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 3f1c3b92a6..28becc38ef 100755 --- a/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -71,7 +71,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py index 
38f3dc49f5..f7a1f97532 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -112,10 +112,10 @@ from .types.logging_metrics import LogMetric from .types.logging_metrics import UpdateLogMetricRequest -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.logging_v2") # type: ignore api_core.check_dependency_versions("google.cloud.logging_v2") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py index dbd972107b..015581175c 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -104,7 +104,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: BaseConfigServiceV2AsyncClient: The constructed client. """ - return BaseConfigServiceV2Client.from_service_account_info.__func__(BaseConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return BaseConfigServiceV2Client.from_service_account_info.__func__(BaseConfigServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -120,7 +120,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: BaseConfigServiceV2AsyncClient: The constructed client. """ - return BaseConfigServiceV2Client.from_service_account_file.__func__(BaseConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return BaseConfigServiceV2Client.from_service_account_file.__func__(BaseConfigServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -156,7 +156,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return BaseConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return BaseConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> ConfigServiceV2Transport: @@ -251,7 +251,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2AsyncClient`.", extra = { @@ -4059,7 +4059,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py index 8d2c2149d4..f2d2dc2c9c 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/client.py @@ -128,7 +128,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -149,7 +149,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -617,7 +617,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -674,7 +674,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.BaseConfigServiceV2Client`.", extra = { @@ -4485,7 +4485,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py index a54da26717..3afff3da52 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -34,7 +34,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index c178350462..106368aa2b 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -48,7 +48,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -76,7 +76,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 85199473c0..e09efb7774 100755 --- 
a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -80,7 +80,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -1526,7 +1526,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 545946ee11..cec9d01d83 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -89,7 +89,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: LoggingServiceV2AsyncClient: The constructed client. """ - return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return LoggingServiceV2Client.from_service_account_info.__func__(LoggingServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -105,7 +105,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: LoggingServiceV2AsyncClient: The constructed client. """ - return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return LoggingServiceV2Client.from_service_account_file.__func__(LoggingServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -141,7 +141,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> LoggingServiceV2Transport: @@ -236,7 +236,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2AsyncClient`.", extra = { @@ -1174,7 +1174,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py index 7080ae8c84..6ff759d61c 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -125,7 +125,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -146,7 +146,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -548,7 +548,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -605,7 +605,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.LoggingServiceV2Client`.", extra = { @@ -1559,7 +1559,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index d3eb7d5bd5..e0614a2c09 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -33,7 +33,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 7a0f637c92..974581ded0 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -47,7 +47,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -75,7 +75,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index da2030c510..bfe64ecf8f 100755 --- 
a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -79,7 +79,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -609,7 +609,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 4fcff827d2..e2a32a8a64 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -90,7 +90,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: BaseMetricsServiceV2AsyncClient: The constructed client. """ - return BaseMetricsServiceV2Client.from_service_account_info.__func__(BaseMetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore + return BaseMetricsServiceV2Client.from_service_account_info.__func__(BaseMetricsServiceV2AsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -106,7 +106,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: BaseMetricsServiceV2AsyncClient: The constructed client. """ - return BaseMetricsServiceV2Client.from_service_account_file.__func__(BaseMetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore + return BaseMetricsServiceV2Client.from_service_account_file.__func__(BaseMetricsServiceV2AsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -142,7 +142,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return BaseMetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return BaseMetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> MetricsServiceV2Transport: @@ -237,7 +237,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2AsyncClient`.", extra = { @@ -1025,7 +1025,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py index ef25218bc1..d363aabbaf 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -126,7 +126,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "logging.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -147,7 +147,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -549,7 +549,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -606,7 +606,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.logging_v2.BaseMetricsServiceV2Client`.", extra = { @@ -1410,7 +1410,7 @@ def cancel_operation( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 6c9441cc02..1d6919d3ab 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -33,7 +33,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 0a6c2a5282..7738a7c75e 100755 --- a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -47,7 +47,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -75,7 +75,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 7f88d30b76..ec1bdf632f 100755 --- 
a/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/tests/integration/goldens/logging_internal/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -51,7 +51,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -79,7 +79,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -537,7 +537,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py index 9a74e0973f..9f55666d16 100755 --- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -72,7 +72,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 65e5ea8ddc..c35d1b7d59 100755 --- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -73,7 +73,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index f2e2530d7c..622b53e971 100755 --- a/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/integration/goldens/logging_internal/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -71,7 +71,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = 
data[i : i + chunk_size] yield chunk.encode("utf-8") diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py index c5182e2f84..2426694c7a 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -59,10 +59,10 @@ from .types.cloud_redis import WeeklyMaintenanceWindow from .types.cloud_redis import ZoneMetadata -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.redis_v1") # type: ignore api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py index 3a958b16e5..6fb2aa27d4 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -114,7 +114,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: CloudRedisAsyncClient: The constructed client. """ - return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore + return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -130,7 +130,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: CloudRedisAsyncClient: The constructed client. """ - return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore + return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -166,7 +166,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
""" - return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> CloudRedisTransport: @@ -264,7 +264,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", extra = { @@ -2146,7 +2146,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py index a21a9fd930..468ae767e5 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/client.py @@ -80,7 +80,7 @@ class CloudRedisClientMeta(type): _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport _transport_registry["rest"] = CloudRedisRestTransport - if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport def get_transport_class(cls, @@ -96,7 +96,7 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. - if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -162,7 +162,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "redis.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -183,7 +183,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) """ # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -588,7 +588,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. 
client_logging.initialize_logging() @@ -667,7 +667,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", extra = { @@ -2566,7 +2566,7 @@ def list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 1cbbf54c25..b5ec255c25 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -37,7 +37,7 @@ _transport_registry['grpc'] = CloudRedisGrpcTransport _transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport _transport_registry['rest'] = CloudRedisRestTransport -if HAS_REST_ASYNC: # pragma: NO COVER +if HAS_REST_ASYNC: # pragma: NO COVER # fmt: skip _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport __all__ = ( diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py index 54f7d694a3..52c8d61334 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -34,7 +34,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index ac164835f5..13c62056dd 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -48,7 +48,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -76,7 +76,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO 
COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index 46f96b5c5c..90ae2e5c74 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -80,7 +80,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -785,7 +785,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 6cf7fb2eca..2b56412694 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -61,7 +61,7 @@ rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -948,7 +948,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -986,7 +986,7 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # 
pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1069,7 +1069,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1107,7 +1107,7 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1193,7 +1193,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1231,7 +1231,7 @@ def __call__(self, resp = self._interceptor.post_export_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_export_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1317,7 +1317,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1355,7 +1355,7 @@ def __call__(self, resp = self._interceptor.post_failover_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1435,7 +1435,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: 
skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1475,7 +1475,7 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.Instance.to_json(response) except: @@ -1555,7 +1555,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1595,7 +1595,7 @@ def __call__(self, resp = self._interceptor.post_get_instance_auth_string(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.InstanceAuthString.to_json(response) except: @@ -1681,7 +1681,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1719,7 +1719,7 @@ def __call__(self, resp = self._interceptor.post_import_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_import_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1801,7 +1801,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1841,7 +1841,7 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) except: @@ -1927,7 +1927,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1965,7 +1965,7 @@ def __call__(self, resp = self._interceptor.post_reschedule_maintenance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2051,7 +2051,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2089,7 +2089,7 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2175,7 +2175,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2213,7 +2213,7 @@ def __call__(self, resp = self._interceptor.post_upgrade_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2240,7 +2240,7 @@ def create_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_instance(self) -> Callable[ @@ -2248,7 +2248,7 @@ def delete_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def export_instance(self) -> Callable[ @@ -2256,7 +2256,7 @@ def export_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def failover_instance(self) -> Callable[ @@ -2264,7 +2264,7 @@ def failover_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance(self) -> Callable[ @@ -2272,7 +2272,7 @@ def get_instance(self) -> Callable[ cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance_auth_string(self) -> Callable[ @@ -2280,7 +2280,7 @@ def get_instance_auth_string(self) -> Callable[ cloud_redis.InstanceAuthString]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def import_instance(self) -> Callable[ @@ -2288,7 +2288,7 @@ def import_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_instances(self) -> Callable[ @@ -2296,7 +2296,7 @@ def list_instances(self) -> Callable[ cloud_redis.ListInstancesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def reschedule_maintenance(self) -> Callable[ @@ -2304,7 +2304,7 @@ def reschedule_maintenance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_instance(self) -> Callable[ @@ -2312,7 +2312,7 @@ def update_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def upgrade_instance(self) -> Callable[ @@ -2320,11 +2320,11 @@ def upgrade_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): def __hash__(self): @@ -2384,7 +2384,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2419,7 +2419,7 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2442,7 +2442,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): def __hash__(self): @@ -2502,7 +2502,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # 
pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2537,7 +2537,7 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2560,7 +2560,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): def __hash__(self): @@ -2617,7 +2617,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2652,7 +2652,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): def __hash__(self): @@ -2709,7 +2709,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2744,7 +2744,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): def __hash__(self): @@ -2804,7 +2804,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2839,7 +2839,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # 
pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2862,7 +2862,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): def __hash__(self): @@ -2922,7 +2922,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2957,7 +2957,7 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2980,7 +2980,7 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): def __hash__(self): @@ -3043,7 +3043,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3078,7 +3078,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 613a5e3411..103fc77500 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -73,7 +73,7 @@ rest_version=f"google-auth@{google.auth.__version__}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip 
DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -908,7 +908,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) @@ -977,7 +977,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1010,7 +1010,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1020,7 +1020,7 @@ async def __call__(self, resp = await self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1104,7 +1104,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1137,7 +1137,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1147,7 +1147,7 @@ async def __call__(self, resp = await self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1234,7 +1234,7 @@ async def __call__(self, # Jsonify the query params query_params = 
_BaseCloudRedisRestTransport._BaseExportInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1267,7 +1267,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1277,7 +1277,7 @@ async def __call__(self, resp = await self._interceptor.post_export_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_export_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1364,7 +1364,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseFailoverInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1397,7 +1397,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1407,7 +1407,7 @@ async def __call__(self, resp = await self._interceptor.post_failover_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_failover_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1488,7 +1488,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1521,7 +1521,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) 
request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = cloud_redis.Instance() @@ -1531,7 +1531,7 @@ async def __call__(self, resp = await self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.Instance.to_json(response) except: @@ -1612,7 +1612,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstanceAuthString._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1645,7 +1645,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = cloud_redis.InstanceAuthString() @@ -1655,7 +1655,7 @@ async def __call__(self, resp = await self._interceptor.post_get_instance_auth_string(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_get_instance_auth_string_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.InstanceAuthString.to_json(response) except: @@ -1742,7 +1742,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseImportInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1775,7 +1775,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1785,7 +1785,7 @@ async def __call__(self, resp = await 
self._interceptor.post_import_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_import_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1868,7 +1868,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1901,7 +1901,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = cloud_redis.ListInstancesResponse() @@ -1911,7 +1911,7 @@ async def __call__(self, resp = await self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) except: @@ -1998,7 +1998,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseRescheduleMaintenance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2031,7 +2031,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -2041,7 +2041,7 @@ async def __call__(self, resp = await self._interceptor.post_reschedule_maintenance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_reschedule_maintenance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ 
-2128,7 +2128,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2161,7 +2161,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -2171,7 +2171,7 @@ async def __call__(self, resp = await self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2258,7 +2258,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpgradeInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2291,7 +2291,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -2301,7 +2301,7 @@ async def __call__(self, resp = await self._interceptor.post_upgrade_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_upgrade_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2383,71 +2383,71 @@ def operations_client(self) -> AsyncOperationsRestClient: def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_instance(self) -> Callable[ [cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: - return 
self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def export_instance(self) -> Callable[ [cloud_redis.ExportInstanceRequest], operations_pb2.Operation]: - return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ExportInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def failover_instance(self) -> Callable[ [cloud_redis.FailoverInstanceRequest], operations_pb2.Operation]: - return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore + return self._FailoverInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance(self) -> Callable[ [cloud_redis.GetInstanceRequest], cloud_redis.Instance]: - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance_auth_string(self) -> Callable[ [cloud_redis.GetInstanceAuthStringRequest], cloud_redis.InstanceAuthString]: - return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstanceAuthString(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def import_instance(self) -> Callable[ [cloud_redis.ImportInstanceRequest], operations_pb2.Operation]: - return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore + return self._ImportInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_instances(self) -> Callable[ [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def reschedule_maintenance(self) -> Callable[ [cloud_redis.RescheduleMaintenanceRequest], operations_pb2.Operation]: - return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore + return self._RescheduleMaintenance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_instance(self) -> Callable[ [cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def upgrade_instance(self) -> Callable[ [cloud_redis.UpgradeInstanceRequest], operations_pb2.Operation]: - return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpgradeInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2507,7 +2507,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED 
and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2540,13 +2540,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2569,7 +2569,7 @@ async def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): def __hash__(self): @@ -2629,7 +2629,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2662,13 +2662,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2691,7 +2691,7 @@ async def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2748,7 +2748,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2781,13 +2781,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip return await self._interceptor.post_cancel_operation(None) @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2844,7 +2844,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2877,13 +2877,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip return await self._interceptor.post_delete_operation(None) @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2943,7 +2943,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2976,13 +2976,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await 
self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3005,7 +3005,7 @@ async def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): def __hash__(self): @@ -3065,7 +3065,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3098,13 +3098,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -3127,7 +3127,7 @@ async def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -3190,7 +3190,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -3223,13 +3223,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await 
self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 56df283219..8b07f4d764 100755 --- a/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/tests/integration/goldens/redis/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -73,7 +73,7 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py index ba763932d3..18818ab804 100755 --- a/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -89,7 +89,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") @@ -7878,7 +7878,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -7892,7 +7892,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -7912,7 +7912,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -8043,7 +8043,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -8057,7 +8057,7 @@ def get_message_fields(field): # For each item
in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -8077,7 +8077,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -9857,7 +9857,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -9871,7 +9871,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -9891,7 +9891,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -10029,7 +10029,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -10043,7 +10043,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -10063,7 +10063,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in 
subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -11489,7 +11489,7 @@ def test_unsupported_parameter_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") options = client_options.ClientOptions(quota_project_id="octopus") - with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore # fmt: skip client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio", diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py index 1f7bad3796..65d3148b8f 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/__init__.py @@ -20,9 +20,9 @@ __version__ = package_version.__version__ -if sys.version_info >= (3, 8): # pragma: NO COVER +if sys.version_info >= (3, 8): # pragma: NO COVER # fmt: skip from importlib import metadata -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # TODO(https://github.com/googleapis/python-api-core/issues/835): Remove # this code path once we drop support for Python 3.7 import importlib_metadata as metadata @@ -46,10 +46,10 @@ from .types.cloud_redis import UpdateInstanceRequest from .types.cloud_redis import WeeklyMaintenanceWindow -if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER +if hasattr(api_core, "check_python_version") and hasattr(api_core, "check_dependency_versions"): # pragma: NO COVER # fmt: skip api_core.check_python_version("google.cloud.redis_v1") # type: ignore api_core.check_dependency_versions("google.cloud.redis_v1") # type: ignore -else: # pragma: NO COVER +else: # pragma: NO COVER # fmt: skip # An older version of api_core is installed which does not define the # functions above. We do equivalent checks manually. try: diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py index cc36acd1f3..a5d849aeee 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/async_client.py @@ -114,7 +114,7 @@ def from_service_account_info(cls, info: dict, *args, **kwargs): Returns: CloudRedisAsyncClient: The constructed client. """ - return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore + return CloudRedisClient.from_service_account_info.__func__(CloudRedisAsyncClient, info, *args, **kwargs) # type: ignore # fmt: skip @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): @@ -130,7 +130,7 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): Returns: CloudRedisAsyncClient: The constructed client. 
""" - return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore + return CloudRedisClient.from_service_account_file.__func__(CloudRedisAsyncClient, filename, *args, **kwargs) # type: ignore # fmt: skip from_service_account_json = from_service_account_file @@ -166,7 +166,7 @@ def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOption Raises: google.auth.exceptions.MutualTLSChannelError: If any errors happen. """ - return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + return CloudRedisClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore # fmt: skip @property def transport(self) -> CloudRedisTransport: @@ -264,7 +264,7 @@ def __init__(self, *, ) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisAsyncClient`.", extra = { @@ -1339,7 +1339,7 @@ async def __aexit__(self, exc_type, exc, tb): DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py index b8a18ffd5e..02f7c02717 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/client.py @@ -80,7 +80,7 @@ class CloudRedisClientMeta(type): _transport_registry["grpc"] = CloudRedisGrpcTransport _transport_registry["grpc_asyncio"] = CloudRedisGrpcAsyncIOTransport _transport_registry["rest"] = CloudRedisRestTransport - if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip _transport_registry["rest_asyncio"] = AsyncCloudRedisRestTransport def get_transport_class(cls, @@ -96,7 +96,7 @@ def get_transport_class(cls, The transport class to use. """ # If a specific transport is requested, return that one. - if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER + if label == "rest_asyncio" and not HAS_ASYNC_REST_DEPENDENCIES: # pragma: NO COVER # fmt: skip raise ASYNC_REST_EXCEPTION if label: return cls._transport_registry[label] @@ -162,7 +162,7 @@ def _get_default_mtls_endpoint(api_endpoint): # Note: DEFAULT_ENDPOINT is deprecated. Use _DEFAULT_ENDPOINT_TEMPLATE instead. DEFAULT_ENDPOINT = "redis.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore # fmt: skip DEFAULT_ENDPOINT ) @@ -183,7 +183,7 @@ def _use_client_cert_effective(): GOOGLE_API_USE_CLIENT_CERTIFICATE is set to an unexpected value.) 
""" # check if google-auth version supports should_use_client_cert for automatic mTLS enablement - if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER + if hasattr(mtls, "should_use_client_cert"): # pragma: NO COVER # fmt: skip return mtls.should_use_client_cert() else: # pragma: NO COVER # if unsupported, fallback to reading from env var @@ -588,7 +588,7 @@ def __init__(self, *, # Initialize the universe domain validation. self._is_universe_domain_valid = False - if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED: # pragma: NO COVER # fmt: skip # Setup logging. client_logging.initialize_logging() @@ -667,7 +667,7 @@ def __init__(self, *, ) if "async" not in str(self._transport): - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG): # pragma: NO COVER # fmt: skip _LOGGER.debug( "Created client `google.cloud.redis_v1.CloudRedisClient`.", extra = { @@ -1765,7 +1765,7 @@ def list_locations( DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ __all__ = ( diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py index 1cbbf54c25..b5ec255c25 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/__init__.py @@ -37,7 +37,7 @@ _transport_registry['grpc'] = CloudRedisGrpcTransport _transport_registry['grpc_asyncio'] = CloudRedisGrpcAsyncIOTransport _transport_registry['rest'] = CloudRedisRestTransport -if HAS_REST_ASYNC: # pragma: NO COVER +if HAS_REST_ASYNC: # pragma: NO COVER # fmt: skip _transport_registry['rest_asyncio'] = AsyncCloudRedisRestTransport __all__ = ( diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py index c442745a86..3297876a71 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/base.py @@ -34,7 +34,7 @@ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py index fe65a8baf8..2600202ff0 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py +++ 
b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc.py @@ -48,7 +48,7 @@ class _LoggingClientInterceptor(grpc.UnaryUnaryClientInterceptor): # pragma: NO COVER def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -76,7 +76,7 @@ def intercept_unary_unary(self, continuation, client_call_details, request): }, ) response = continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py index a3393990f2..af3c8391b5 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/grpc_asyncio.py @@ -52,7 +52,7 @@ class _LoggingClientAIOInterceptor(grpc.aio.UnaryUnaryClientInterceptor): # pragma: NO COVER async def intercept_unary_unary(self, continuation, client_call_details, request): logging_enabled = CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(std_logging.DEBUG) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip request_metadata = client_call_details.metadata if isinstance(request, proto.Message): request_payload = type(request).to_json(request) @@ -80,7 +80,7 @@ async def intercept_unary_unary(self, continuation, client_call_details, request }, ) response = await continuation(client_call_details, request) - if logging_enabled: # pragma: NO COVER + if logging_enabled: # pragma: NO COVER # fmt: skip response_metadata = await response.trailing_metadata() # Convert gRPC metadata `` to list of tuples metadata = dict([(k, str(v)) for k, v in response_metadata]) if response_metadata else None @@ -579,7 +579,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py index 2bcbb50899..099bcc8f66 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest.py @@ -61,7 +61,7 @@ rest_version=f"requests@{requests_version}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip 
DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -684,7 +684,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -722,7 +722,7 @@ def __call__(self, resp = self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -805,7 +805,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -843,7 +843,7 @@ def __call__(self, resp = self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -923,7 +923,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -963,7 +963,7 @@ def __call__(self, resp = self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.Instance.to_json(response) except: @@ -1045,7 +1045,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] 
try: @@ -1085,7 +1085,7 @@ def __call__(self, resp = self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) except: @@ -1171,7 +1171,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1209,7 +1209,7 @@ def __call__(self, resp = self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1236,7 +1236,7 @@ def create_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_instance(self) -> Callable[ @@ -1244,7 +1244,7 @@ def delete_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance(self) -> Callable[ @@ -1252,7 +1252,7 @@ def get_instance(self) -> Callable[ cloud_redis.Instance]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_instances(self) -> Callable[ @@ -1260,7 +1260,7 @@ def list_instances(self) -> Callable[ cloud_redis.ListInstancesResponse]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. 
# In C++ this would require a dynamic_cast - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_instance(self) -> Callable[ @@ -1268,11 +1268,11 @@ def update_instance(self) -> Callable[ operations_pb2.Operation]: # The return type is fine, but mypy isn't sophisticated enough to determine what's going on here. # In C++ this would require a dynamic_cast - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, CloudRedisRestStub): def __hash__(self): @@ -1332,7 +1332,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1367,7 +1367,7 @@ def __call__(self, resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1390,7 +1390,7 @@ def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, CloudRedisRestStub): def __hash__(self): @@ -1450,7 +1450,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1485,7 +1485,7 @@ def __call__(self, resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1508,7 +1508,7 @@ def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class 
_CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, CloudRedisRestStub): def __hash__(self): @@ -1565,7 +1565,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1600,7 +1600,7 @@ def __call__(self, @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, CloudRedisRestStub): def __hash__(self): @@ -1657,7 +1657,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1692,7 +1692,7 @@ def __call__(self, @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, CloudRedisRestStub): def __hash__(self): @@ -1752,7 +1752,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1787,7 +1787,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1810,7 +1810,7 @@ def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, CloudRedisRestStub): def __hash__(self): @@ -1870,7 +1870,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = 
"{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1905,7 +1905,7 @@ def __call__(self, resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1928,7 +1928,7 @@ def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, CloudRedisRestStub): def __hash__(self): @@ -1991,7 +1991,7 @@ def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2026,7 +2026,7 @@ def __call__(self, resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py index 5127f0d171..1aa7043304 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_asyncio.py @@ -73,7 +73,7 @@ rest_version=f"google-auth@{google.auth.__version__}", ) -if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER +if hasattr(DEFAULT_CLIENT_INFO, "protobuf_runtime_version"): # pragma: NO COVER # fmt: skip DEFAULT_CLIENT_INFO.protobuf_runtime_version = google.protobuf.__version__ @@ -614,7 +614,7 @@ def _prep_wrapped_messages(self, client_info): } def _wrap_method(self, func, *args, **kwargs): - if self._wrap_with_kind: # pragma: NO COVER + if self._wrap_with_kind: # pragma: NO COVER # fmt: skip kwargs["kind"] = self.kind return gapic_v1.method_async.wrap_method(func, *args, **kwargs) @@ -683,7 +683,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCreateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -716,7 +716,7 @@ async def __call__(self, payload = 
json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -726,7 +726,7 @@ async def __call__(self, resp = await self._interceptor.post_create_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_create_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -810,7 +810,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -843,7 +843,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -853,7 +853,7 @@ async def __call__(self, resp = await self._interceptor.post_delete_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_delete_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -934,7 +934,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -967,7 +967,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = cloud_redis.Instance() @@ -977,7 +977,7 @@ async def __call__(self, resp = await 
self._interceptor.post_get_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_get_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.Instance.to_json(response) except: @@ -1060,7 +1060,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListInstances._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1093,7 +1093,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = cloud_redis.ListInstancesResponse() @@ -1103,7 +1103,7 @@ async def __call__(self, resp = await self._interceptor.post_list_instances(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_list_instances_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = cloud_redis.ListInstancesResponse.to_json(response) except: @@ -1190,7 +1190,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseUpdateInstance._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1223,7 +1223,7 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip # Return the response resp = operations_pb2.Operation() @@ -1233,7 +1233,7 @@ async def __call__(self, resp = await self._interceptor.post_update_instance(resp) response_metadata = [(k, str(v)) for k, v in response.headers.items()] resp, _ = await self._interceptor.post_update_instance_with_metadata(resp, response_metadata) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1315,35 +1315,35 @@ 
def operations_client(self) -> AsyncOperationsRestClient: def create_instance(self) -> Callable[ [cloud_redis.CreateInstanceRequest], operations_pb2.Operation]: - return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._CreateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def delete_instance(self) -> Callable[ [cloud_redis.DeleteInstanceRequest], operations_pb2.Operation]: - return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_instance(self) -> Callable[ [cloud_redis.GetInstanceRequest], cloud_redis.Instance]: - return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore + return self._GetInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def list_instances(self) -> Callable[ [cloud_redis.ListInstancesRequest], cloud_redis.ListInstancesResponse]: - return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore + return self._ListInstances(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def update_instance(self) -> Callable[ [cloud_redis.UpdateInstanceRequest], operations_pb2.Operation]: - return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore + return self._UpdateInstance(self._session, self._host, self._interceptor) # type: ignore # fmt: skip @property def get_location(self): - return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore + return self._GetLocation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetLocation(_BaseCloudRedisRestTransport._BaseGetLocation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1403,7 +1403,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetLocation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1436,13 +1436,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = locations_pb2.Location() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_location(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1465,7 +1465,7 @@ async def __call__(self, @property def list_locations(self): - return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore + return self._ListLocations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class 
_ListLocations(_BaseCloudRedisRestTransport._BaseListLocations, AsyncCloudRedisRestStub): def __hash__(self): @@ -1525,7 +1525,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListLocations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1558,13 +1558,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = locations_pb2.ListLocationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_locations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1587,7 +1587,7 @@ async def __call__(self, @property def cancel_operation(self): - return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore + return self._CancelOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _CancelOperation(_BaseCloudRedisRestTransport._BaseCancelOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1644,7 +1644,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseCancelOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1677,13 +1677,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip return await self._interceptor.post_cancel_operation(None) @property def delete_operation(self): - return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore + return self._DeleteOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _DeleteOperation(_BaseCloudRedisRestTransport._BaseDeleteOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1740,7 +1740,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseDeleteOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and 
_LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1773,13 +1773,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip return await self._interceptor.post_delete_operation(None) @property def get_operation(self): - return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore + return self._GetOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _GetOperation(_BaseCloudRedisRestTransport._BaseGetOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -1839,7 +1839,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseGetOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1872,13 +1872,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_get_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -1901,7 +1901,7 @@ async def __call__(self, @property def list_operations(self): - return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore + return self._ListOperations(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _ListOperations(_BaseCloudRedisRestTransport._BaseListOperations, AsyncCloudRedisRestStub): def __hash__(self): @@ -1961,7 +1961,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseListOperations._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -1994,13 +1994,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise 
core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.ListOperationsResponse() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_list_operations(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: @@ -2023,7 +2023,7 @@ async def __call__(self, @property def wait_operation(self): - return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore + return self._WaitOperation(self._session, self._host, self._interceptor) # type: ignore # fmt: skip class _WaitOperation(_BaseCloudRedisRestTransport._BaseWaitOperation, AsyncCloudRedisRestStub): def __hash__(self): @@ -2086,7 +2086,7 @@ async def __call__(self, # Jsonify the query params query_params = _BaseCloudRedisRestTransport._BaseWaitOperation._get_query_params_json(transcoded_request) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] try: @@ -2119,13 +2119,13 @@ async def __call__(self, payload = json.loads(content.decode('utf-8')) request_url = "{host}{uri}".format(host=self._host, uri=transcoded_request['uri']) method = transcoded_request['method'] - raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore + raise core_exceptions.format_http_response_error(response, method, request_url, payload) # type: ignore # fmt: skip content = await response.read() resp = operations_pb2.Operation() resp = json_format.Parse(content, resp) resp = await self._interceptor.post_wait_operation(resp) - if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER + if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG): # pragma: NO COVER # fmt: skip try: response_payload = json_format.MessageToJson(resp) except: diff --git a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py index 85b3522cb2..2784276147 100755 --- a/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py +++ b/tests/integration/goldens/redis_selective/google/cloud/redis_v1/services/cloud_redis/transports/rest_base.py @@ -73,7 +73,7 @@ def __init__(self, *, # Run the base constructor maybe_url_match = re.match("^(?P<scheme>http(?:s)?://)?(?P<host>.*)$", host) if maybe_url_match is None: - raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER + raise ValueError(f"Unexpected hostname structure: {host}") # pragma: NO COVER # fmt: skip url_match_items = maybe_url_match.groupdict() diff --git a/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py b/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py index f58069c220..160581609f 100755 ---
a/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py +++ b/tests/integration/goldens/redis_selective/tests/unit/gapic/redis_v1/test_cloud_redis.py @@ -89,7 +89,7 @@ async def mock_async_gen(data, chunk_size=1): - for i in range(0, len(data)): # pragma: NO COVER + for i in range(0, len(data)): # pragma: NO COVER # fmt: skip chunk = data[i : i + chunk_size] yield chunk.encode("utf-8") @@ -4508,7 +4508,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4522,7 +4522,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -4542,7 +4542,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -4673,7 +4673,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -4687,7 +4687,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -4707,7 +4707,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5737,7 +5737,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5751,7 
+5751,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -5771,7 +5771,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -5909,7 +5909,7 @@ def get_message_fields(field): if is_field_type_proto_plus_type: message_fields = field.message.meta.fields.values() # Add `# pragma: NO COVER` because there may not be any `*_pb2` field types - else: # pragma: NO COVER + else: # pragma: NO COVER # fmt: skip message_fields = field.message.DESCRIPTOR.fields return message_fields @@ -5923,7 +5923,7 @@ def get_message_fields(field): # For each item in the sample request, create a list of sub fields which are not present at runtime # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for field, value in request_init["instance"].items(): # pragma: NO COVER + for field, value in request_init["instance"].items(): # pragma: NO COVER # fmt: skip result = None is_repeated = False # For repeated fields @@ -5943,7 +5943,7 @@ def get_message_fields(field): # Remove fields from the sample request which are not present in the runtime version of the dependency # Add `# pragma: NO COVER` because this test code will not run if all subfields are present at runtime - for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER + for subfield_to_delete in subfields_not_in_runtime: # pragma: NO COVER # fmt: skip field = subfield_to_delete.get("field") field_repeated = subfield_to_delete.get("is_repeated") subfield = subfield_to_delete.get("subfield") @@ -6679,7 +6679,7 @@ def test_unsupported_parameter_rest_asyncio(): if not HAS_ASYNC_REST_EXTRA: pytest.skip("the library must be installed with the `async_rest` extra to test this feature.") options = client_options.ClientOptions(quota_project_id="octopus") - with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore + with pytest.raises(core_exceptions.AsyncRestUnsupportedParameterError, match="google.api_core.client_options.ClientOptions.quota_project_id") as exc: # type: ignore # fmt: skip client = CloudRedisAsyncClient( credentials=async_anonymous_credentials(), transport="rest_asyncio",
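The same edit repeats across all of the hunks above, so a minimal, hypothetical sketch of the motivation may help. A formatter such as black can wrap a long line, which moves a trailing `# pragma: NO COVER` or `# type: ignore` onto a different physical line than the statement it annotates, and coverage.py or mypy may then no longer associate the directive with the intended line. Appending `# fmt: skip` asks black to leave that single line unformatted. The names below mirror the generated code, but the snippet itself is illustrative and not part of the golden files; `CLIENT_LOGGING_SUPPORTED` is simply assumed True here.

    import logging

    CLIENT_LOGGING_SUPPORTED = True  # assumption for this sketch only
    _LOGGER = logging.getLogger(__name__)

    # Without `# fmt: skip`, black may reflow the long line roughly as:
    #     if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(
    #         logging.DEBUG
    #     ):  # pragma: NO COVER
    # leaving the pragma on the closing-parenthesis line rather than on the `if` line.
    # With `# fmt: skip`, black leaves this one line exactly as written:
    if CLIENT_LOGGING_SUPPORTED and _LOGGER.isEnabledFor(logging.DEBUG):  # pragma: NO COVER # fmt: skip
        _LOGGER.debug("client logging enabled")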