From 65f05b0c41ad40fab404301e0c4a674c07b65f18 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Wed, 3 Nov 2021 18:53:02 +0100 Subject: [PATCH 01/34] chore(deps): update all dependencies (#436) --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index c3c03d970..81596164b 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ -google-cloud-logging==2.6.0 -google-cloud-bigquery==2.28.1 +google-cloud-logging==2.7.0 +google-cloud-bigquery==2.29.0 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 9cd7e0f47b65e9bf5bdae5da20c836a3a5d90ec9 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 4 Nov 2021 11:36:47 +0100 Subject: [PATCH 02/34] chore(deps): update dependency google-cloud-bigquery to v2.30.0 (#437) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 81596164b..050b1ed8f 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.29.0 +google-cloud-bigquery==2.30.0 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 05433a95c83b037db3e5385ab14a1f6441cf8649 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 5 Nov 2021 10:54:50 +0100 Subject: [PATCH 03/34] chore(deps): update all dependencies (#438) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 050b1ed8f..f5941a7d2 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.30.0 +google-cloud-bigquery==2.30.1 google-cloud-storage==1.42.3 google-cloud-pubsub==2.8.0 From 9badefe88cffe906bc2574c3b1fc04e155572f2a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 17:58:21 -0500 Subject: [PATCH 04/34] chore: use gapic-generator-python 0.56.2 (#440) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 13 +- .../services/config_service_v2/client.py | 25 ++- .../config_service_v2/transports/base.py | 8 +- .../config_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../logging_service_v2/async_client.py | 13 +- .../services/logging_service_v2/client.py | 25 ++- .../logging_service_v2/transports/base.py | 8 +- 
.../logging_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../metrics_service_v2/async_client.py | 13 +- .../services/metrics_service_v2/client.py | 25 ++- .../metrics_service_v2/transports/base.py | 8 +- .../metrics_service_v2/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- google/cloud/logging_v2/types/log_entry.py | 3 + .../cloud/logging_v2/types/logging_config.py | 1 + .../logging_v2/test_config_service_v2.py | 144 +++++++++++++----- .../logging_v2/test_logging_service_v2.py | 76 ++++++--- .../logging_v2/test_metrics_service_v2.py | 56 +++++-- 20 files changed, 307 insertions(+), 135 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py index 23db3b5c1..2be78b15b 100644 --- a/google/cloud/logging_v2/services/config_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py index e2c8fe0a5..0de49b68f 100644 --- a/google/cloud/logging_v2/services/config_service_v2/client.py +++ b/google/cloud/logging_v2/services/config_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.logging_v2.services.config_service_v2 import pagers from google.cloud.logging_v2.types import logging_config @@ -336,8 +338,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 00557f640..2f41a0107 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py index cd06eac41..b34d0a121 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as 
ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py index 54615a125..1cf4f3121 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py index 0b927dea9..325fe7193 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py @@ -28,14 +28,17 @@ ) import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py index 8e64d9024..44ec1a85f 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/client.py +++ b/google/cloud/logging_v2/services/logging_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Iterable, Iterator, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import monitored_resource_pb2 # type: ignore from google.cloud.logging_v2.services.logging_service_v2 import pagers @@ -280,8 +282,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index 556488467..cfafe30e4 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py index 146f97cbb..0379cbecf 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth 
import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py index 84e765cf4..16602c2b4 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py index 1e3213a4c..f034fd9f5 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py index 9290d62cc..2f339a130 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/client.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.api import distribution_pb2 # type: ignore from google.api import metric_pb2 # type: ignore @@ -283,8 +285,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. - use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index e3105748a..7137678ab 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py index 4dc00d79d..194d341f3 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: 
ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py index 956f53b1d..37cec4a63 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/logging_v2/types/log_entry.py b/google/cloud/logging_v2/types/log_entry.py index 99331d327..93e428622 100644 --- a/google/cloud/logging_v2/types/log_entry.py +++ b/google/cloud/logging_v2/types/log_entry.py @@ -87,14 +87,17 @@ class LogEntry(proto.Message): "type.googleapis.com/google.cloud.audit.AuditLog" "type.googleapis.com/google.appengine.logging.v1.RequestLog". + This field is a member of `oneof`_ ``payload``. text_payload (str): The log entry payload, represented as a Unicode string (UTF-8). + This field is a member of `oneof`_ ``payload``. json_payload (google.protobuf.struct_pb2.Struct): The log entry payload, represented as a structure that is expressed as a JSON object. + This field is a member of `oneof`_ ``payload``. timestamp (google.protobuf.timestamp_pb2.Timestamp): Optional. The time the event described by the log entry diff --git a/google/cloud/logging_v2/types/logging_config.py b/google/cloud/logging_v2/types/logging_config.py index 0724911b9..3ea70506c 100644 --- a/google/cloud/logging_v2/types/logging_config.py +++ b/google/cloud/logging_v2/types/logging_config.py @@ -241,6 +241,7 @@ class LogSink(proto.Message): bigquery_options (google.cloud.logging_v2.types.BigQueryOptions): Optional. Options that affect sinks exporting data to BigQuery. + This field is a member of `oneof`_ ``options``. create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The creation timestamp of the diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 0d96a08fc..054982f12 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -642,7 +642,9 @@ def test_list_buckets_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_buckets_flattened_error(): @@ -678,7 +680,9 @@ async def test_list_buckets_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1706,7 +1710,9 @@ def test_list_views_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_views_flattened_error(): @@ -1742,7 +1748,9 @@ async def test_list_views_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2610,7 +2618,9 @@ def test_list_sinks_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_sinks_flattened_error(): @@ -2646,7 +2656,9 @@ async def test_list_sinks_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2987,7 +2999,9 @@ def test_get_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val def test_get_sink_flattened_error(): @@ -3023,7 +3037,9 @@ async def test_get_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3226,8 +3242,12 @@ def test_create_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val def test_create_sink_flattened_error(): @@ -3267,8 +3287,12 @@ async def test_create_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].sink == logging_config.LogSink(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -3475,9 +3499,15 @@ def test_update_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_sink_flattened_error(): @@ -3520,9 +3550,15 @@ async def test_update_sink_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" - assert args[0].sink == logging_config.LogSink(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val + arg = args[0].sink + mock_val = logging_config.LogSink(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -3685,7 +3721,9 @@ def test_delete_sink_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val def test_delete_sink_flattened_error(): @@ -3719,7 +3757,9 @@ async def test_delete_sink_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].sink_name == "sink_name_value" + arg = args[0].sink_name + mock_val = "sink_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -3889,7 +3929,9 @@ def test_list_exclusions_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_exclusions_flattened_error(): @@ -3925,7 +3967,9 @@ async def test_list_exclusions_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -4269,7 +4313,9 @@ def test_get_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_exclusion_flattened_error(): @@ -4305,7 +4351,9 @@ async def test_get_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -4490,8 +4538,12 @@ def test_create_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val def test_create_exclusion_flattened_error(): @@ -4532,8 +4584,12 @@ async def test_create_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -4721,9 +4777,15 @@ def test_update_exclusion_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val def test_update_exclusion_flattened_error(): @@ -4766,9 +4828,15 @@ async def test_update_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" - assert args[0].exclusion == logging_config.LogExclusion(name="name_value") - assert args[0].update_mask == field_mask_pb2.FieldMask(paths=["paths_value"]) + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val + arg = args[0].exclusion + mock_val = logging_config.LogExclusion(name="name_value") + assert arg == mock_val + arg = args[0].update_mask + mock_val = field_mask_pb2.FieldMask(paths=["paths_value"]) + assert arg == mock_val @pytest.mark.asyncio @@ -4931,7 +4999,9 @@ def test_delete_exclusion_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_exclusion_flattened_error(): @@ -4965,7 +5035,9 @@ async def test_delete_exclusion_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index 5d6021f9a..eac24ed2c 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -639,7 +639,9 @@ def test_delete_log_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val def test_delete_log_flattened_error(): @@ -673,7 +675,9 @@ async def test_delete_log_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -797,12 +801,18 @@ def test_write_log_entries_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - assert args[0].resource == monitored_resource_pb2.MonitoredResource( - type="type__value" - ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name="log_name_value")] + assert arg == mock_val def test_write_log_entries_flattened_error(): @@ -849,12 +859,18 @@ async def test_write_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].log_name == "log_name_value" - assert args[0].resource == monitored_resource_pb2.MonitoredResource( - type="type__value" - ) - assert args[0].labels == {"key_value": "value_value"} - assert args[0].entries == [log_entry.LogEntry(log_name="log_name_value")] + arg = args[0].log_name + mock_val = "log_name_value" + assert arg == mock_val + arg = args[0].resource + mock_val = monitored_resource_pb2.MonitoredResource(type="type__value") + assert arg == mock_val + arg = args[0].labels + mock_val = {"key_value": "value_value"} + assert arg == mock_val + arg = args[0].entries + mock_val = [log_entry.LogEntry(log_name="log_name_value")] + assert arg == mock_val @pytest.mark.asyncio @@ -977,9 +993,15 @@ def test_list_log_entries_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" + arg = args[0].resource_names + mock_val = ["resource_names_value"] + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + arg = args[0].order_by + mock_val = "order_by_value" + assert arg == mock_val def test_list_log_entries_flattened_error(): @@ -1022,9 +1044,15 @@ async def test_list_log_entries_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].resource_names == ["resource_names_value"] - assert args[0].filter == "filter_value" - assert args[0].order_by == "order_by_value" + arg = args[0].resource_names + mock_val = ["resource_names_value"] + assert arg == mock_val + arg = args[0].filter + mock_val = "filter_value" + assert arg == mock_val + arg = args[0].order_by + mock_val = "order_by_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1616,7 +1644,9 @@ def test_list_logs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_logs_flattened_error(): @@ -1652,7 +1682,9 @@ async def test_list_logs_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 9c14746d0..1d42212a1 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -649,7 +649,9 @@ def test_list_log_metrics_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_log_metrics_flattened_error(): @@ -685,7 +687,9 @@ async def test_list_log_metrics_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1013,7 +1017,9 @@ def test_get_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val def test_get_log_metric_flattened_error(): @@ -1049,7 +1055,9 @@ async def test_get_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1249,8 +1257,12 @@ def test_create_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val def test_create_log_metric_flattened_error(): @@ -1292,8 +1304,12 @@ async def test_create_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1496,8 +1512,12 @@ def test_update_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val def test_update_log_metric_flattened_error(): @@ -1540,8 +1560,12 @@ async def test_update_log_metric_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" - assert args[0].metric == logging_metrics.LogMetric(name="name_value") + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val + arg = args[0].metric + mock_val = logging_metrics.LogMetric(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -1715,7 +1739,9 @@ def test_delete_log_metric_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val def test_delete_log_metric_flattened_error(): @@ -1751,7 +1777,9 @@ async def test_delete_log_metric_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].metric_name == "metric_name_value" + arg = args[0].metric_name + mock_val = "metric_name_value" + assert arg == mock_val @pytest.mark.asyncio From 97804c63869f2962318e493f542b1b80fa54dc8b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 11 Nov 2021 02:09:40 +0100 Subject: [PATCH 05/34] chore(deps): update dependency google-cloud-pubsub to v2.9.0 (#441) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index f5941a7d2..122299946 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.30.1 google-cloud-storage==1.42.3 -google-cloud-pubsub==2.8.0 +google-cloud-pubsub==2.9.0 From 872365855ff571d8f62fc85e9dbcd5511045f9d4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 12:25:34 -0500 Subject: [PATCH 06/34] chore(python): add .github/CODEOWNERS as a templated file (#442) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index cb89b2e32..7519fa3a2 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ec49167c606648a063d1222220b48119c912562849a0528f35bfb592a9f72737 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 64d2aaa36..21b5b8b8c 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/api-logging @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/api-logging -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners \ No newline at end of file +# @googleapis/python-samples-owners @googleapis/api-logging are the default owners for samples changes +/samples/ @googleapis/python-samples-owners @googleapis/api-logging From bdbbd3cc25a3b533fcc0cfab4006cd7da2fb98b3 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 18 Nov 2021 19:03:59 +0100 Subject: [PATCH 07/34] chore(deps): update dependency google-cloud-storage to v1.43.0 (#449) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 122299946..43a218aa5 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.30.1 -google-cloud-storage==1.42.3 +google-cloud-storage==1.43.0 google-cloud-pubsub==2.9.0 From 4726a2761c8b51bc770c5760bf8f7b68b1ee3b68 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 3 Dec 2021 11:53:38 +0100 Subject: [PATCH 08/34] chore(deps): update dependency google-cloud-bigquery to v2.31.0 (#451) --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 43a218aa5..fa8f6386f 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.30.1 +google-cloud-bigquery==2.31.0 google-cloud-storage==1.43.0 google-cloud-pubsub==2.9.0 From fed1a757e7d9b97eaa17ec351af8ee215ad6e343 Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:20:31 -0500 Subject: [PATCH 09/34] chore: update .repo-metadata.json (#457) --- .repo-metadata.json | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index fdb0a66c2..9dac57e33 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,14 +2,15 @@ "name": "logging", "name_pretty": "Cloud Logging", "product_documentation": "https://cloud.google.com/logging/docs", - "client_documentation": "https://googleapis.dev/python/logging/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/logging/latest", "issue_tracker": "https://issuetracker.google.com/savedsearches/559764", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_COMBO", "repo": "googleapis/python-logging", "distribution_name": "google-cloud-logging", "api_id": "logging.googleapis.com", "codeowner_team": "@googleapis/api-logging", - "default_version": "v2" + "default_version": "v2", + "api_shortname": "logging" } From 32f4acc1f6f48832bce163aa5a17b3d6fba4135a Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 17:10:12 +0000 Subject: [PATCH 10/34] chore: use python-samples-reviewers (#461) --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 4 ++-- samples/AUTHORING_GUIDE.md | 2 +- samples/CONTRIBUTING.md | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff 
--git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7519fa3a2..f33299ddb 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 21b5b8b8c..2a3b42055 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python @googleapis/api-logging are the default owners for changes in this repo * @googleapis/yoshi-python @googleapis/api-logging -# @googleapis/python-samples-owners @googleapis/api-logging are the default owners for samples changes -/samples/ @googleapis/python-samples-owners @googleapis/api-logging +# @googleapis/python-samples-reviewers @googleapis/api-logging are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/api-logging diff --git a/samples/AUTHORING_GUIDE.md b/samples/AUTHORING_GUIDE.md index 55c97b32f..8249522ff 100644 --- a/samples/AUTHORING_GUIDE.md +++ b/samples/AUTHORING_GUIDE.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/AUTHORING_GUIDE.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/AUTHORING_GUIDE.md \ No newline at end of file diff --git a/samples/CONTRIBUTING.md b/samples/CONTRIBUTING.md index 34c882b6f..f5fe2e6ba 100644 --- a/samples/CONTRIBUTING.md +++ b/samples/CONTRIBUTING.md @@ -1 +1 @@ -See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/master/CONTRIBUTING.md \ No newline at end of file +See https://github.com/GoogleCloudPlatform/python-docs-samples/blob/main/CONTRIBUTING.md \ No newline at end of file From 1f37542dc83aa22c6af41802e378db97951a1852 Mon Sep 17 00:00:00 2001 From: losalex <90795544+losalex@users.noreply.github.com> Date: Thu, 6 Jan 2022 12:38:25 -0800 Subject: [PATCH 11/34] chore: enable staleness and pull request size bots on repository (#458) --- .github/auto-label.yaml | 7 +++++++ 1 file changed, 7 insertions(+) create mode 100644 .github/auto-label.yaml diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 000000000..1e4706499 --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,7 @@ +product: true +requestsize: + enabled: true +staleness: + pullrequest: true + old: 30 + extraold: 60 From aa1dd7c8f7de9002a6f8d938a5b084041e176fc2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sat, 8 Jan 2022 06:14:25 -0500 Subject: [PATCH 12/34] chore: use gapic-generator-python 0.58.4 (#459) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: use gapic-generator-python 0.58.4 fix: provide appropriate mock values for message body fields committer: dovs PiperOrigin-RevId: 419025932 Source-Link: https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- 
.../config_service_v2/transports/base.py | 1 - .../logging_service_v2/transports/base.py | 1 - .../metrics_service_v2/transports/base.py | 1 - .../logging_v2/test_config_service_v2.py | 267 ++++++------------ .../logging_v2/test_logging_service_v2.py | 103 +++---- .../logging_v2/test_metrics_service_v2.py | 77 ++--- 6 files changed, 163 insertions(+), 287 deletions(-) diff --git a/google/cloud/logging_v2/services/config_service_v2/transports/base.py b/google/cloud/logging_v2/services/config_service_v2/transports/base.py index 2f41a0107..90e305488 100644 --- a/google/cloud/logging_v2/services/config_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/config_service_v2/transports/base.py @@ -105,7 +105,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py index cfafe30e4..6fe2e9e8a 100644 --- a/google/cloud/logging_v2/services/logging_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/logging_service_v2/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py index 7137678ab..fef40f239 100644 --- a/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py +++ b/google/cloud/logging_v2/services/metrics_service_v2/transports/base.py @@ -106,7 +106,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 054982f12..5a82cec81 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -250,20 +250,20 @@ def test_config_service_v2_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -332,7 +332,7 @@ def test_config_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -427,7 +427,7 @@ def test_config_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -458,7 +458,7 @@ def test_config_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -491,9 +491,8 @@ def test_config_service_v2_client_client_options_from_dict(): ) -def test_list_buckets( - transport: str = "grpc", request_type=logging_config.ListBucketsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListBucketsRequest, dict,]) +def test_list_buckets(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -520,10 +519,6 @@ def test_list_buckets( assert response.next_page_token == "next_page_token_value" -def test_list_buckets_from_dict(): - test_list_buckets(request_type=dict) - - def test_list_buckets_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -699,8 +694,10 @@ async def test_list_buckets_flattened_error_async(): ) -def test_list_buckets_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_buckets_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -737,8 +734,10 @@ def test_list_buckets_pager(): assert all(isinstance(i, logging_config.LogBucket) for i in results) -def test_list_buckets_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_buckets_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_buckets), "__call__") as call: @@ -841,9 +840,8 @@ async def test_list_buckets_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_bucket( - transport: str = "grpc", request_type=logging_config.GetBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetBucketRequest, dict,]) +def test_get_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -878,10 +876,6 @@ def test_get_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_get_bucket_from_dict(): - test_get_bucket(request_type=dict) - - def test_get_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -995,9 +989,8 @@ async def test_get_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_create_bucket( - transport: str = "grpc", request_type=logging_config.CreateBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateBucketRequest, dict,]) +def test_create_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1032,10 +1025,6 @@ def test_create_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_create_bucket_from_dict(): - test_create_bucket(request_type=dict) - - def test_create_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1149,9 +1138,8 @@ async def test_create_bucket_field_headers_async(): assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_bucket( - transport: str = "grpc", request_type=logging_config.UpdateBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateBucketRequest, dict,]) +def test_update_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1186,10 +1174,6 @@ def test_update_bucket( assert response.lifecycle_state == logging_config.LifecycleState.ACTIVE -def test_update_bucket_from_dict(): - test_update_bucket(request_type=dict) - - def test_update_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
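The hunks above apply one mechanical refactor across the whole test module: each standalone `test_*_from_dict` wrapper is deleted and the main test gains a `@pytest.mark.parametrize` decorator, so pytest itself generates one run with the proto request type and one with a plain dict. A minimal sketch of the pattern, with illustrative names that are not from this repo:

import pytest

class PingRequest:
    """Stand-in for a generated proto request type."""

def ping(request):
    # The GAPIC surface accepts either the request class or a plain dict.
    return isinstance(request, (PingRequest, dict))

@pytest.mark.parametrize("request_type", [PingRequest, dict])
def test_ping(request_type):
    # One body now covers both call styles; no test_ping_from_dict wrapper needed.
    assert ping(request_type())

Coverage is identical to the old wrapper pair, with one fewer boilerplate function per RPC.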
@@ -1303,9 +1287,8 @@ async def test_update_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_delete_bucket( - transport: str = "grpc", request_type=logging_config.DeleteBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteBucketRequest, dict,]) +def test_delete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1329,10 +1312,6 @@ def test_delete_bucket( assert response is None -def test_delete_bucket_from_dict(): - test_delete_bucket(request_type=dict) - - def test_delete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1431,9 +1410,8 @@ async def test_delete_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_undelete_bucket( - transport: str = "grpc", request_type=logging_config.UndeleteBucketRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UndeleteBucketRequest, dict,]) +def test_undelete_bucket(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1457,10 +1435,6 @@ def test_undelete_bucket( assert response is None -def test_undelete_bucket_from_dict(): - test_undelete_bucket(request_type=dict) - - def test_undelete_bucket_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1559,9 +1533,8 @@ async def test_undelete_bucket_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_list_views( - transport: str = "grpc", request_type=logging_config.ListViewsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListViewsRequest, dict,]) +def test_list_views(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1588,10 +1561,6 @@ def test_list_views( assert response.next_page_token == "next_page_token_value" -def test_list_views_from_dict(): - test_list_views(request_type=dict) - - def test_list_views_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1767,8 +1736,10 @@ async def test_list_views_flattened_error_async(): ) -def test_list_views_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_views_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1805,8 +1776,10 @@ def test_list_views_pager(): assert all(isinstance(i, logging_config.LogView) for i in results) -def test_list_views_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_views_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_views), "__call__") as call: @@ -1909,7 +1882,8 @@ async def test_list_views_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRequest): +@pytest.mark.parametrize("request_type", [logging_config.GetViewRequest, dict,]) +def test_get_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1938,10 +1912,6 @@ def test_get_view(transport: str = "grpc", request_type=logging_config.GetViewRe assert response.filter == "filter_value" -def test_get_view_from_dict(): - test_get_view(request_type=dict) - - def test_get_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2051,9 +2021,8 @@ async def test_get_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_create_view( - transport: str = "grpc", request_type=logging_config.CreateViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateViewRequest, dict,]) +def test_create_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2082,10 +2051,6 @@ def test_create_view( assert response.filter == "filter_value" -def test_create_view_from_dict(): - test_create_view(request_type=dict) - - def test_create_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2195,9 +2160,8 @@ async def test_create_view_field_headers_async(): assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] -def test_update_view( - transport: str = "grpc", request_type=logging_config.UpdateViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateViewRequest, dict,]) +def test_update_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2226,10 +2190,6 @@ def test_update_view( assert response.filter == "filter_value" -def test_update_view_from_dict(): - test_update_view(request_type=dict) - - def test_update_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2339,9 +2299,8 @@ async def test_update_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_delete_view( - transport: str = "grpc", request_type=logging_config.DeleteViewRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteViewRequest, dict,]) +def test_delete_view(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2365,10 +2324,6 @@ def test_delete_view( assert response is None -def test_delete_view_from_dict(): - test_delete_view(request_type=dict) - - def test_delete_view_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
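The `*_pager` and `*_pages` tests above queue several canned responses on the mocked gRPC stub and then drive the returned pager, which keeps calling the stub while a `next_page_token` remains. The core mock technique, reduced to a runnable sketch with hypothetical types:

from unittest import mock

class FakePage:
    def __init__(self, items, next_page_token):
        self.items = items
        self.next_page_token = next_page_token

def iterate_pages(call):
    # Request pages until the token runs out, like a GAPIC pager does.
    token, results = None, []
    while True:
        page = call(page_token=token)
        results.extend(page.items)
        token = page.next_page_token
        if not token:
            return results

stub = mock.Mock(side_effect=[FakePage([1, 2], "t1"), FakePage([3], "")])
assert iterate_pages(stub) == [1, 2, 3]
assert stub.call_count == 2

`side_effect` hands out one queued response per call, which is why the real tests can assert on both the flattened results and the individual pages.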
@@ -2467,9 +2422,8 @@ async def test_delete_view_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_list_sinks( - transport: str = "grpc", request_type=logging_config.ListSinksRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListSinksRequest, dict,]) +def test_list_sinks(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2496,10 +2450,6 @@ def test_list_sinks( assert response.next_page_token == "next_page_token_value" -def test_list_sinks_from_dict(): - test_list_sinks(request_type=dict) - - def test_list_sinks_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2675,8 +2625,10 @@ async def test_list_sinks_flattened_error_async(): ) -def test_list_sinks_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_sinks_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2713,8 +2665,10 @@ def test_list_sinks_pager(): assert all(isinstance(i, logging_config.LogSink) for i in results) -def test_list_sinks_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_sinks_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_sinks), "__call__") as call: @@ -2817,7 +2771,8 @@ async def test_list_sinks_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRequest): +@pytest.mark.parametrize("request_type", [logging_config.GetSinkRequest, dict,]) +def test_get_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2861,10 +2816,6 @@ def test_get_sink(transport: str = "grpc", request_type=logging_config.GetSinkRe assert response.include_children is True -def test_get_sink_from_dict(): - test_get_sink(request_type=dict) - - def test_get_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3056,9 +3007,8 @@ async def test_get_sink_flattened_error_async(): ) -def test_create_sink( - transport: str = "grpc", request_type=logging_config.CreateSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateSinkRequest, dict,]) +def test_create_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3102,10 +3052,6 @@ def test_create_sink( assert response.include_children is True -def test_create_sink_from_dict(): - test_create_sink(request_type=dict) - - def test_create_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
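A number of hunks in this patch change nothing but the order of keyword arguments at the call site (`client_options` now precedes `transport`). Since both are passed by name, Python semantics are untouched; only the generator's output style moved. A two-line demonstration:

def make_client(credentials=None, client_options=None, transport=None):
    return (credentials, client_options, transport)

# Keyword order is irrelevant to the resulting call.
assert make_client(transport="grpc", client_options={}) == make_client(client_options={}, transport="grpc")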
@@ -3311,9 +3257,8 @@ async def test_create_sink_flattened_error_async(): ) -def test_update_sink( - transport: str = "grpc", request_type=logging_config.UpdateSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateSinkRequest, dict,]) +def test_update_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3357,10 +3302,6 @@ def test_update_sink( assert response.include_children is True -def test_update_sink_from_dict(): - test_update_sink(request_type=dict) - - def test_update_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3578,9 +3519,8 @@ async def test_update_sink_flattened_error_async(): ) -def test_delete_sink( - transport: str = "grpc", request_type=logging_config.DeleteSinkRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteSinkRequest, dict,]) +def test_delete_sink(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3604,10 +3544,6 @@ def test_delete_sink( assert response is None -def test_delete_sink_from_dict(): - test_delete_sink(request_type=dict) - - def test_delete_sink_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3776,9 +3712,8 @@ async def test_delete_sink_flattened_error_async(): ) -def test_list_exclusions( - transport: str = "grpc", request_type=logging_config.ListExclusionsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.ListExclusionsRequest, dict,]) +def test_list_exclusions(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -3805,10 +3740,6 @@ def test_list_exclusions( assert response.next_page_token == "next_page_token_value" -def test_list_exclusions_from_dict(): - test_list_exclusions(request_type=dict) - - def test_list_exclusions_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3986,8 +3917,10 @@ async def test_list_exclusions_flattened_error_async(): ) -def test_list_exclusions_pager(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_exclusions_pager(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4029,8 +3962,10 @@ def test_list_exclusions_pager(): assert all(isinstance(i, logging_config.LogExclusion) for i in results) -def test_list_exclusions_pages(): - client = ConfigServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_exclusions_pages(transport_name: str = "grpc"): + client = ConfigServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_exclusions), "__call__") as call: @@ -4148,9 +4083,8 @@ async def test_list_exclusions_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_exclusion( - transport: str = "grpc", request_type=logging_config.GetExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetExclusionRequest, dict,]) +def test_get_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4183,10 +4117,6 @@ def test_get_exclusion( assert response.disabled is True -def test_get_exclusion_from_dict(): - test_get_exclusion(request_type=dict) - - def test_get_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4370,9 +4300,8 @@ async def test_get_exclusion_flattened_error_async(): ) -def test_create_exclusion( - transport: str = "grpc", request_type=logging_config.CreateExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.CreateExclusionRequest, dict,]) +def test_create_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4405,10 +4334,6 @@ def test_create_exclusion( assert response.disabled is True -def test_create_exclusion_from_dict(): - test_create_exclusion(request_type=dict) - - def test_create_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4608,9 +4533,8 @@ async def test_create_exclusion_flattened_error_async(): ) -def test_update_exclusion( - transport: str = "grpc", request_type=logging_config.UpdateExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.UpdateExclusionRequest, dict,]) +def test_update_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4643,10 +4567,6 @@ def test_update_exclusion( assert response.disabled is True -def test_update_exclusion_from_dict(): - test_update_exclusion(request_type=dict) - - def test_update_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -4856,9 +4776,8 @@ async def test_update_exclusion_flattened_error_async(): ) -def test_delete_exclusion( - transport: str = "grpc", request_type=logging_config.DeleteExclusionRequest -): +@pytest.mark.parametrize("request_type", [logging_config.DeleteExclusionRequest, dict,]) +def test_delete_exclusion(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -4882,10 +4801,6 @@ def test_delete_exclusion( assert response is None -def test_delete_exclusion_from_dict(): - test_delete_exclusion(request_type=dict) - - def test_delete_exclusion_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
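Note the new signatures such as `def test_list_exclusions_pager(transport_name: str = "grpc")`: as far as pytest's collection rules go, parameters with default values are not resolved as fixtures, so these tests still run as-is with the default while leaving `transport_name` open to explicit parametrization later. A minimal check of that assumption:

# Collected and run directly: pytest should not look for a "transport_name"
# fixture because the parameter carries a default value.
def test_uses_default_transport(transport_name: str = "grpc"):
    assert transport_name == "grpc"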
@@ -5054,9 +4969,8 @@ async def test_delete_exclusion_flattened_error_async(): ) -def test_get_cmek_settings( - transport: str = "grpc", request_type=logging_config.GetCmekSettingsRequest -): +@pytest.mark.parametrize("request_type", [logging_config.GetCmekSettingsRequest, dict,]) +def test_get_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5089,10 +5003,6 @@ def test_get_cmek_settings( assert response.service_account_id == "service_account_id_value" -def test_get_cmek_settings_from_dict(): - test_get_cmek_settings(request_type=dict) - - def test_get_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -5210,9 +5120,10 @@ async def test_get_cmek_settings_field_headers_async(): assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] -def test_update_cmek_settings( - transport: str = "grpc", request_type=logging_config.UpdateCmekSettingsRequest -): +@pytest.mark.parametrize( + "request_type", [logging_config.UpdateCmekSettingsRequest, dict,] +) +def test_update_cmek_settings(request_type, transport: str = "grpc"): client = ConfigServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -5245,10 +5156,6 @@ def test_update_cmek_settings( assert response.service_account_id == "service_account_id_value" -def test_update_cmek_settings_from_dict(): - test_update_cmek_settings(request_type=dict) - - def test_update_cmek_settings_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -6009,7 +5916,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index eac24ed2c..f73ef775c 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -257,20 +257,20 @@ def test_logging_service_v2_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -339,7 +339,7 @@ def test_logging_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -434,7 +434,7 @@ def test_logging_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -465,7 +465,7 @@ def test_logging_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -498,7 +498,8 @@ def test_logging_service_v2_client_client_options_from_dict(): ) -def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogRequest): +@pytest.mark.parametrize("request_type", [logging.DeleteLogRequest, dict,]) +def test_delete_log(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -522,10 +523,6 @@ def test_delete_log(transport: str = "grpc", request_type=logging.DeleteLogReque assert response is None -def test_delete_log_from_dict(): - test_delete_log(request_type=dict) - - def test_delete_log_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -694,9 +691,8 @@ async def test_delete_log_flattened_error_async(): ) -def test_write_log_entries( - transport: str = "grpc", request_type=logging.WriteLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.WriteLogEntriesRequest, dict,]) +def test_write_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -722,10 +718,6 @@ def test_write_log_entries( assert isinstance(response, logging.WriteLogEntriesResponse) -def test_write_log_entries_from_dict(): - test_write_log_entries(request_type=dict) - - def test_write_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
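The client-option tests above rely on `mock.patch.dict(os.environ, ...)`, which overlays environment variables only for the duration of the `with` block and restores the original environment on exit. A self-contained illustration (assuming the variable is unset beforehand):

import os
from unittest import mock

def read_mtls_mode():
    return os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")

def test_env_overlay_is_scoped():
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        # Inside the block the unsupported value is visible, which is what
        # lets the real tests assert that the client constructor raises.
        assert read_mtls_mode() == "Unsupported"
    # Outside the block the overlay is gone and the default is back.
    assert read_mtls_mode() == "auto"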
@@ -891,9 +883,8 @@ async def test_write_log_entries_flattened_error_async(): ) -def test_list_log_entries( - transport: str = "grpc", request_type=logging.ListLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.ListLogEntriesRequest, dict,]) +def test_list_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -920,10 +911,6 @@ def test_list_log_entries( assert response.next_page_token == "next_page_token_value" -def test_list_log_entries_from_dict(): - test_list_log_entries(request_type=dict) - - def test_list_log_entries_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1072,8 +1059,10 @@ async def test_list_log_entries_flattened_error_async(): ) -def test_list_log_entries_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_entries_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1107,8 +1096,10 @@ def test_list_log_entries_pager(): assert all(isinstance(i, log_entry.LogEntry) for i in results) -def test_list_log_entries_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_entries_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_entries), "__call__") as call: @@ -1211,10 +1202,10 @@ async def test_list_log_entries_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_monitored_resource_descriptors( - transport: str = "grpc", - request_type=logging.ListMonitoredResourceDescriptorsRequest, -): +@pytest.mark.parametrize( + "request_type", [logging.ListMonitoredResourceDescriptorsRequest, dict,] +) +def test_list_monitored_resource_descriptors(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1243,10 +1234,6 @@ def test_list_monitored_resource_descriptors( assert response.next_page_token == "next_page_token_value" -def test_list_monitored_resource_descriptors_from_dict(): - test_list_monitored_resource_descriptors(request_type=dict) - - def test_list_monitored_resource_descriptors_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1304,8 +1291,10 @@ async def test_list_monitored_resource_descriptors_async_from_dict(): await test_list_monitored_resource_descriptors_async(request_type=dict) -def test_list_monitored_resource_descriptors_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_monitored_resource_descriptors_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -1352,8 +1341,10 @@ def test_list_monitored_resource_descriptors_pager(): ) -def test_list_monitored_resource_descriptors_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_monitored_resource_descriptors_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1491,7 +1482,8 @@ async def test_list_monitored_resource_descriptors_async_pages(): assert page_.raw_page.next_page_token == token -def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest): +@pytest.mark.parametrize("request_type", [logging.ListLogsRequest, dict,]) +def test_list_logs(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1519,10 +1511,6 @@ def test_list_logs(transport: str = "grpc", request_type=logging.ListLogsRequest assert response.next_page_token == "next_page_token_value" -def test_list_logs_from_dict(): - test_list_logs(request_type=dict) - - def test_list_logs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1701,8 +1689,10 @@ async def test_list_logs_flattened_error_async(): ) -def test_list_logs_pager(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_logs_pager(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1730,8 +1720,10 @@ def test_list_logs_pager(): assert all(isinstance(i, str) for i in results) -def test_list_logs_pages(): - client = LoggingServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_logs_pages(transport_name: str = "grpc"): + client = LoggingServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_logs), "__call__") as call: @@ -1807,9 +1799,8 @@ async def test_list_logs_async_pages(): assert page_.raw_page.next_page_token == token -def test_tail_log_entries( - transport: str = "grpc", request_type=logging.TailLogEntriesRequest -): +@pytest.mark.parametrize("request_type", [logging.TailLogEntriesRequest, dict,]) +def test_tail_log_entries(request_type, transport: str = "grpc"): client = LoggingServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1835,10 +1826,6 @@ def test_tail_log_entries( assert isinstance(message, logging.TailLogEntriesResponse) -def test_tail_log_entries_from_dict(): - test_tail_log_entries(request_type=dict) - - @pytest.mark.asyncio async def test_tail_log_entries_async( transport: str = "grpc_asyncio", request_type=logging.TailLogEntriesRequest @@ -2415,7 +2402,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index 1d42212a1..dcb87b2ab 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -255,20 +255,20 @@ def test_metrics_service_v2_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -337,7 +337,7 @@ def test_metrics_service_v2_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -432,7 +432,7 @@ def test_metrics_service_v2_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -463,7 +463,7 @@ def test_metrics_service_v2_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, 
client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -496,9 +496,8 @@ def test_metrics_service_v2_client_client_options_from_dict(): ) -def test_list_log_metrics( - transport: str = "grpc", request_type=logging_metrics.ListLogMetricsRequest -): +@pytest.mark.parametrize("request_type", [logging_metrics.ListLogMetricsRequest, dict,]) +def test_list_log_metrics(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -525,10 +524,6 @@ def test_list_log_metrics( assert response.next_page_token == "next_page_token_value" -def test_list_log_metrics_from_dict(): - test_list_log_metrics(request_type=dict) - - def test_list_log_metrics_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -706,8 +701,10 @@ async def test_list_log_metrics_flattened_error_async(): ) -def test_list_log_metrics_pager(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_metrics_pager(transport_name: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -744,8 +741,10 @@ def test_list_log_metrics_pager(): assert all(isinstance(i, logging_metrics.LogMetric) for i in results) -def test_list_log_metrics_pages(): - client = MetricsServiceV2Client(credentials=ga_credentials.AnonymousCredentials,) +def test_list_log_metrics_pages(transport_name: str = "grpc"): + client = MetricsServiceV2Client( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_log_metrics), "__call__") as call: @@ -848,9 +847,8 @@ async def test_list_log_metrics_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_log_metric( - transport: str = "grpc", request_type=logging_metrics.GetLogMetricRequest -): +@pytest.mark.parametrize("request_type", [logging_metrics.GetLogMetricRequest, dict,]) +def test_get_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -885,10 +883,6 @@ def test_get_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_get_log_metric_from_dict(): - test_get_log_metric(request_type=dict) - - def test_get_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
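The recurring `*_empty_call` tests pin down one contract: invoking a client method with no arguments must still send a well-formed default request to the transport. Stripped of the GAPIC machinery, the check looks roughly like this (all names are stand-ins):

from unittest import mock

class GetLogMetricRequest:
    def __eq__(self, other):
        return type(other) is type(self)

def get_log_metric(stub, request=None):
    # The client substitutes a default request when none is supplied.
    stub(request if request is not None else GetLogMetricRequest())

stub = mock.Mock()
get_log_metric(stub)
stub.assert_called_once_with(GetLogMetricRequest())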
@@ -1074,9 +1068,10 @@ async def test_get_log_metric_flattened_error_async(): ) -def test_create_log_metric( - transport: str = "grpc", request_type=logging_metrics.CreateLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.CreateLogMetricRequest, dict,] +) +def test_create_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1113,10 +1108,6 @@ def test_create_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_create_log_metric_from_dict(): - test_create_log_metric(request_type=dict) - - def test_create_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1328,9 +1319,10 @@ async def test_create_log_metric_flattened_error_async(): ) -def test_update_log_metric( - transport: str = "grpc", request_type=logging_metrics.UpdateLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.UpdateLogMetricRequest, dict,] +) +def test_update_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1367,10 +1359,6 @@ def test_update_log_metric( assert response.version == logging_metrics.LogMetric.ApiVersion.V1 -def test_update_log_metric_from_dict(): - test_update_log_metric(request_type=dict) - - def test_update_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1584,9 +1572,10 @@ async def test_update_log_metric_flattened_error_async(): ) -def test_delete_log_metric( - transport: str = "grpc", request_type=logging_metrics.DeleteLogMetricRequest -): +@pytest.mark.parametrize( + "request_type", [logging_metrics.DeleteLogMetricRequest, dict,] +) +def test_delete_log_metric(request_type, transport: str = "grpc"): client = MetricsServiceV2Client( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1612,10 +1601,6 @@ def test_delete_log_metric( assert response is None -def test_delete_log_metric_from_dict(): - test_delete_log_metric(request_type=dict) - - def test_delete_log_metric_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2336,7 +2321,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From bc8f7ac444195701795375d082a948bc2346fa2e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 11 Jan 2022 10:35:23 -0500 Subject: [PATCH 13/34] chore(samples): Add check for tests in directory (#463) Source-Link: https://github.com/googleapis/synthtool/commit/52aef91f8d25223d9dbdb4aebd94ba8eea2101f3 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- samples/snippets/noxfile.py | 70 +++++++++++++++++++++---------------- 2 files changed, 40 insertions(+), 32 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f33299ddb..6b8a73b31 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 93a9122cc..3bbef5d54 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -14,6 +14,7 @@ from __future__ import print_function +import glob import os from pathlib import Path import sys @@ -184,37 +185,44 @@ def blacken(session: nox.sessions.Session) -> None: def _session_tests( session: nox.sessions.Session, post_install: Callable = None ) -> None: - if TEST_CONFIG["pip_version_override"]: - pip_version = TEST_CONFIG["pip_version_override"] - session.install(f"pip=={pip_version}") - """Runs py.test for a particular project.""" - if os.path.exists("requirements.txt"): - if os.path.exists("constraints.txt"): - session.install("-r", "requirements.txt", "-c", "constraints.txt") - else: - session.install("-r", "requirements.txt") - - if os.path.exists("requirements-test.txt"): - if os.path.exists("constraints-test.txt"): - session.install("-r", "requirements-test.txt", "-c", "constraints-test.txt") - else: - session.install("-r", "requirements-test.txt") - - if INSTALL_LIBRARY_FROM_SOURCE: - session.install("-e", _get_repo_root()) - - if post_install: - post_install(session) - - session.run( - "pytest", - *(PYTEST_COMMON_ARGS + session.posargs), - # Pytest will return 5 when no tests are collected. This can happen - # on travis where slow and flaky tests are excluded. 
- # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html - success_codes=[0, 5], - env=get_pytest_env_vars(), - ) + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) @nox.session(python=ALL_VERSIONS) From 50ff41babc99ed38baa16eb69dc12bd92054b98e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 12:03:12 -0500 Subject: [PATCH 14/34] build: switch to release-please for tagging (#464) Source-Link: https://github.com/googleapis/synthtool/commit/f8077d237e0df2cb0066dfc6e09fc41e1c59646a Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 1 + .github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .github/release-trigger.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 6b8a73b31..ff5126c18 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:36a95b8f494e4674dc9eee9af98961293b51b86b3649942aac800ae6c1f796d4 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad059..466597e5b 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 000000000..d4ca94189 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 614e2d30fcdf0f3e54725697704ed265318845a4 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Fri, 14 Jan 2022 22:24:14 +0100 Subject: [PATCH 15/34] chore(deps): update dependency google-cloud-storage to v1.44.0 (#460) Co-authored-by: Anthonios Partheniou --- samples/snippets/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index fa8f6386f..283735f7a 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ 
google-cloud-logging==2.7.0 google-cloud-bigquery==2.31.0 -google-cloud-storage==1.43.0 +google-cloud-storage==1.44.0 google-cloud-pubsub==2.9.0 From 78263fd4c9e783aed84cf7aaedb9e1f9f77e621e Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Sun, 16 Jan 2022 15:26:12 +0100 Subject: [PATCH 16/34] chore(deps): update all dependencies (#466) --- samples/snippets/requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 283735f7a..d75e274c2 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,4 @@ google-cloud-logging==2.7.0 -google-cloud-bigquery==2.31.0 -google-cloud-storage==1.44.0 +google-cloud-bigquery==2.32.0 +google-cloud-storage==2.0.0 google-cloud-pubsub==2.9.0 From 8d4ea6f0335673b419be8bb72c8cbe1b08196d82 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Sun, 16 Jan 2022 10:28:20 -0500 Subject: [PATCH 17/34] chore(python): update release.sh to use keystore (#465) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/release.sh | 2 +- .kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ff5126c18..eecb84c21 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index f8994b034..8a4d1f432 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-logging python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index e0012bf9c..637885e85 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-logging/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 926e295a642480c9851bffe5bf2a5ef3c8053516 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 18 Jan 2022 20:28:51 -0500 Subject: [PATCH 18/34] chore(python): Noxfile recognizes that tests can live in a folder (#468) Source-Link: https://github.com/googleapis/synthtool/commit/4760d8dce1351d93658cb11d02a1b7ceb23ae5d7 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- samples/snippets/noxfile.py | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eecb84c21..52d79c11f 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 3bbef5d54..20cdfc620 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From 6dfee4ea8f1a3d195a955e399b0bcc205f9bb2b4 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 Jan 2022 12:32:39 -0500 Subject: [PATCH 19/34] chore(python): exclude templated GH action workflows (#470) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * exclude templated github actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 15 ++++++++++++++- owlbot.py | 7 ++++++- 2 files changed, 20 insertions(+), 2 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 52d79c11f..8cb43804d 100644 --- a/.github/.OwlBot.lock.yaml 
+++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,16 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:f0e4b51deef56bed74d3e2359c583fc104a8d6367da3984fc5c66938db738828 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 diff --git a/owlbot.py b/owlbot.py index ad8e32d4c..b266ed13f 100644 --- a/owlbot.py +++ b/owlbot.py @@ -61,7 +61,12 @@ unit_test_external_dependencies=["flask", "webob", "django"], samples=True, ) -s.move(templated_files, excludes=[".coveragerc", "docs/multiprocessing.rst"]) +s.move(templated_files, + excludes=[ + ".coveragerc", + "docs/multiprocessing.rst", + ".github/workflows", # exclude gh actions as credentials are needed for tests + ]) # adjust .trampolinerc for environment tests s.replace( From 7ab93045c8cb0edc2161a3243192d8baa48d9c09 Mon Sep 17 00:00:00 2001 From: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Date: Mon, 24 Jan 2022 13:08:18 -0700 Subject: [PATCH 20/34] chore: make samples 3.6 check optional (#471) --- .github/sync-repo-settings.yaml | 1 - 1 file changed, 1 deletion(-) diff --git a/.github/sync-repo-settings.yaml b/.github/sync-repo-settings.yaml index 3e98ae70f..37438d33d 100644 --- a/.github/sync-repo-settings.yaml +++ b/.github/sync-repo-settings.yaml @@ -10,6 +10,5 @@ branchProtectionRules: - 'Kokoro' - 'cla/google' - 'Samples - Lint' - - 'Samples - Python 3.6' - 'Samples - Python 3.7' - 'Samples - Python 3.8' From 81ca8c616acb988be1fbecfc2a0b1a5b39280149 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 25 Jan 2022 12:23:46 -0500 Subject: [PATCH 21/34] feat: add api key support (#472) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: upgrade gapic-generator-java, gax-java and gapic-generator-python PiperOrigin-RevId: 423842556 Source-Link: https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../config_service_v2/async_client.py | 38 +++++- .../services/config_service_v2/client.py | 127 +++++++++++------ .../logging_service_v2/async_client.py | 37 +++++ .../services/logging_service_v2/client.py | 127 +++++++++++------ .../metrics_service_v2/async_client.py | 38 +++++- .../services/metrics_service_v2/client.py | 127 +++++++++++------ .../logging_v2/test_config_service_v2.py | 128 ++++++++++++++++++ .../logging_v2/test_logging_service_v2.py | 128 ++++++++++++++++++ .../logging_v2/test_metrics_service_v2.py | 128 ++++++++++++++++++ 9 files changed, 747 insertions(+), 131 deletions(-) diff --git 
a/google/cloud/logging_v2/services/config_service_v2/async_client.py b/google/cloud/logging_v2/services/config_service_v2/async_client.py
index 2be78b15b..664f10ada 100644
--- a/google/cloud/logging_v2/services/config_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/async_client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
 from google.api_core.client_options import ClientOptions
@@ -120,6 +120,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return ConfigServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> ConfigServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/google/cloud/logging_v2/services/config_service_v2/client.py b/google/cloud/logging_v2/services/config_service_v2/client.py
index 0de49b68f..f4a1be57c 100644
--- a/google/cloud/logging_v2/services/config_service_v2/client.py
+++ b/google/cloud/logging_v2/services/config_service_v2/client.py
@@ -287,6 +287,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
     def __init__(
         self,
         *,
@@ -337,57 +404,22 @@ def __init__(
         if client_options is None:
             client_options = client_options_lib.ClientOptions()
 
-        # Create SSL credentials for mutual TLS if needed.
-        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
-            "true",
-            "false",
-        ):
-            raise ValueError(
-                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-            )
-        use_client_cert = (
-            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
+        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+            client_options
         )
-        client_cert_source_func = None
-        is_mtls = False
-        if use_client_cert:
-            if client_options.client_cert_source:
-                is_mtls = True
-                client_cert_source_func = client_options.client_cert_source
-            else:
-                is_mtls = mtls.has_default_client_cert_source()
-                if is_mtls:
-                    client_cert_source_func = mtls.default_client_cert_source()
-                else:
-                    client_cert_source_func = None
-
-        # Figure out which api endpoint to use.
-        if client_options.api_endpoint is not None:
-            api_endpoint = client_options.api_endpoint
-        else:
-            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
-            if use_mtls_env == "never":
-                api_endpoint = self.DEFAULT_ENDPOINT
-            elif use_mtls_env == "always":
-                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
-            elif use_mtls_env == "auto":
-                if is_mtls:
-                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
-                else:
-                    api_endpoint = self.DEFAULT_ENDPOINT
-            else:
-                raise MutualTLSChannelError(
-                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
-                    "values: never, auto, always"
-                )
+        api_key_value = getattr(client_options, "api_key", None)
+        if api_key_value and credentials:
+            raise ValueError(
+                "client_options.api_key and credentials are mutually exclusive"
+            )
 
         # Save or instantiate the transport.
         # Ordinarily, we provide the transport, but allowing a custom transport
         # instance provides an extensibility point for unusual situations.
         if isinstance(transport, ConfigServiceV2Transport):
             # transport is a ConfigServiceV2Transport instance.
-            if credentials or client_options.credentials_file:
+            if credentials or client_options.credentials_file or api_key_value:
                 raise ValueError(
                     "When providing a transport instance, "
                     "provide its credentials directly."
@@ -399,6 +431,15 @@ def __init__(
             )
             self._transport = transport
         else:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
             Transport = type(self).get_transport_class(transport)
             self._transport = Transport(
                 credentials=credentials,
diff --git a/google/cloud/logging_v2/services/logging_service_v2/async_client.py b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
index 325fe7193..e14453424 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/async_client.py
@@ -18,6 +18,7 @@
 import re
 from typing import (
     Dict,
+    Optional,
     AsyncIterable,
     Awaitable,
     AsyncIterator,
@@ -117,6 +118,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return LoggingServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> LoggingServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/google/cloud/logging_v2/services/logging_service_v2/client.py b/google/cloud/logging_v2/services/logging_service_v2/client.py
index 44ec1a85f..5815c8d19 100644
--- a/google/cloud/logging_v2/services/logging_service_v2/client.py
+++ b/google/cloud/logging_v2/services/logging_service_v2/client.py
@@ -231,6 +231,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -281,57 +348,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, LoggingServiceV2Transport): # transport is a LoggingServiceV2Transport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." 
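[Editor's note] For orientation, this is how the new classmethod is meant to be driven from the environment. A minimal sketch; the assertions mirror the unit tests added later in this same patch:

    import os

    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    # With "never", the plain endpoint is selected and no cert source is returned.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "never"
    endpoint, cert_source = ConfigServiceV2Client.get_mtls_endpoint_and_cert_source()
    assert endpoint == ConfigServiceV2Client.DEFAULT_ENDPOINT
    assert cert_source is None

    # With "always", the mTLS endpoint is selected even without a client certificate.
    os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "always"
    endpoint, cert_source = ConfigServiceV2Client.get_mtls_endpoint_and_cert_source()
    assert endpoint == ConfigServiceV2Client.DEFAULT_MTLS_ENDPOINT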
@@ -343,6 +375,15 @@ def __init__(
             )
             self._transport = transport
         else:
+            import google.auth._default  # type: ignore
+
+            if api_key_value and hasattr(
+                google.auth._default, "get_api_key_credentials"
+            ):
+                credentials = google.auth._default.get_api_key_credentials(
+                    api_key_value
+                )
+
             Transport = type(self).get_transport_class(transport)
             self._transport = Transport(
                 credentials=credentials,
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
index f034fd9f5..eb7321ab7 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/async_client.py
@@ -16,7 +16,7 @@
 from collections import OrderedDict
 import functools
 import re
-from typing import Dict, Sequence, Tuple, Type, Union
+from typing import Dict, Optional, Sequence, Tuple, Type, Union
 import pkg_resources
 
 from google.api_core.client_options import ClientOptions
@@ -109,6 +109,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
 
     from_service_account_json = from_service_account_file
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        return MetricsServiceV2Client.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore
+
     @property
     def transport(self) -> MetricsServiceV2Transport:
         """Returns the transport used by the client instance.
diff --git a/google/cloud/logging_v2/services/metrics_service_v2/client.py b/google/cloud/logging_v2/services/metrics_service_v2/client.py
index 2f339a130..ced653a51 100644
--- a/google/cloud/logging_v2/services/metrics_service_v2/client.py
+++ b/google/cloud/logging_v2/services/metrics_service_v2/client.py
@@ -234,6 +234,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]:
         m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
         return m.groupdict() if m else {}
 
+    @classmethod
+    def get_mtls_endpoint_and_cert_source(
+        cls, client_options: Optional[client_options_lib.ClientOptions] = None
+    ):
+        """Return the API endpoint and client cert source for mutual TLS.
+
+        The client cert source is determined in the following order:
+        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
+        client cert source is None.
+        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
+        default client cert source exists, use the default one; otherwise the client cert
+        source is None.
+
+        The API endpoint is determined in the following order:
+        (1) if `client_options.api_endpoint` is provided, use the provided one.
+        (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
+        default mTLS endpoint; if the environment variable is "never", use the default API
+        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
+        use the default API endpoint.
+
+        More details can be found at https://google.aip.dev/auth/4114.
+
+        Args:
+            client_options (google.api_core.client_options.ClientOptions): Custom options for the
+                client. Only the `api_endpoint` and `client_cert_source` properties may be used
+                in this method.
+
+        Returns:
+            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
+                client cert source to use.
+
+        Raises:
+            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
+        """
+        if client_options is None:
+            client_options = client_options_lib.ClientOptions()
+        use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
+        use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
+        if use_client_cert not in ("true", "false"):
+            raise ValueError(
+                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
+            )
+        if use_mtls_endpoint not in ("auto", "never", "always"):
+            raise MutualTLSChannelError(
+                "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
+            )
+
+        # Figure out the client cert source to use.
+        client_cert_source = None
+        if use_client_cert == "true":
+            if client_options.client_cert_source:
+                client_cert_source = client_options.client_cert_source
+            elif mtls.has_default_client_cert_source():
+                client_cert_source = mtls.default_client_cert_source()
+
+        # Figure out which api endpoint to use.
+        if client_options.api_endpoint is not None:
+            api_endpoint = client_options.api_endpoint
+        elif use_mtls_endpoint == "always" or (
+            use_mtls_endpoint == "auto" and client_cert_source
+        ):
+            api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
+        else:
+            api_endpoint = cls.DEFAULT_ENDPOINT
+
+        return api_endpoint, client_cert_source
+
     def __init__(
         self,
         *,
@@ -284,57 +351,22 @@ def __init__(
         if client_options is None:
             client_options = client_options_lib.ClientOptions()
 
-        # Create SSL credentials for mutual TLS if needed.
-        if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in (
-            "true",
-            "false",
-        ):
-            raise ValueError(
-                "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
-            )
-        use_client_cert = (
-            os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true"
+        api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
+            client_options
        )
-        client_cert_source_func = None
-        is_mtls = False
-        if use_client_cert:
-            if client_options.client_cert_source:
-                is_mtls = True
-                client_cert_source_func = client_options.client_cert_source
-            else:
-                is_mtls = mtls.has_default_client_cert_source()
-                if is_mtls:
-                    client_cert_source_func = mtls.default_client_cert_source()
-                else:
-                    client_cert_source_func = None
-
-        # Figure out which api endpoint to use.
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, MetricsServiceV2Transport): # transport is a MetricsServiceV2Transport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -346,6 +378,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/tests/unit/gapic/logging_v2/test_config_service_v2.py b/tests/unit/gapic/logging_v2/test_config_service_v2.py index 5a82cec81..efb46eaad 100644 --- a/tests/unit/gapic/logging_v2/test_config_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_config_service_v2.py @@ -409,6 +409,87 @@ def test_config_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [ConfigServiceV2Client, ConfigServiceV2AsyncClient] +) +@mock.patch.object( + ConfigServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2Client), +) +@mock.patch.object( + ConfigServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(ConfigServiceV2AsyncClient), +) +def test_config_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -5294,6 +5375,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.ConfigServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = ConfigServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
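[Editor's note] The tests above pin down the api_key contract introduced by this patch. For reference, a minimal usage sketch; the key string is a placeholder, and a google-auth version that exposes get_api_key_credentials is assumed:

    from google.api_core import client_options
    from google.cloud.logging_v2.services.config_service_v2 import ConfigServiceV2Client

    options = client_options.ClientOptions()
    options.api_key = "YOUR_API_KEY"  # placeholder value

    # api_key is mutually exclusive with explicit credentials and with a
    # pre-built transport instance, as the tests above assert.
    client = ConfigServiceV2Client(client_options=options)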
transport = transports.ConfigServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -5981,3 +6079,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (ConfigServiceV2Client, transports.ConfigServiceV2GrpcTransport), + (ConfigServiceV2AsyncClient, transports.ConfigServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/logging_v2/test_logging_service_v2.py b/tests/unit/gapic/logging_v2/test_logging_service_v2.py index f73ef775c..9f11a0210 100644 --- a/tests/unit/gapic/logging_v2/test_logging_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_logging_service_v2.py @@ -416,6 +416,87 @@ def test_logging_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [LoggingServiceV2Client, LoggingServiceV2AsyncClient] +) +@mock.patch.object( + LoggingServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2Client), +) +@mock.patch.object( + LoggingServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(LoggingServiceV2AsyncClient), +) +def test_logging_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -1883,6 +1964,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.LoggingServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = LoggingServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.LoggingServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2467,3 +2565,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (LoggingServiceV2Client, transports.LoggingServiceV2GrpcTransport), + (LoggingServiceV2AsyncClient, transports.LoggingServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py index dcb87b2ab..97a2c4a99 100644 --- a/tests/unit/gapic/logging_v2/test_metrics_service_v2.py +++ b/tests/unit/gapic/logging_v2/test_metrics_service_v2.py @@ -414,6 +414,87 @@ def test_metrics_service_v2_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [MetricsServiceV2Client, MetricsServiceV2AsyncClient] +) +@mock.patch.object( + MetricsServiceV2Client, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2Client), +) +@mock.patch.object( + MetricsServiceV2AsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(MetricsServiceV2AsyncClient), +) +def test_metrics_service_v2_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -1801,6 +1882,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.MetricsServiceV2GrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = MetricsServiceV2Client( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.MetricsServiceV2GrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -2386,3 +2484,33 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (MetricsServiceV2Client, transports.MetricsServiceV2GrpcTransport), + (MetricsServiceV2AsyncClient, transports.MetricsServiceV2GrpcAsyncIOTransport), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From e3cac888d40bf67af11e57b74615b0c3b8e8aa3e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:44:10 -0800 Subject: [PATCH 22/34] chore!: deprecate AppEngineHandler and ContainerEngineHandler (#310) --- google/cloud/logging_v2/client.py | 4 +-- google/cloud/logging_v2/handlers/_helpers.py | 21 +++++---------- .../cloud/logging_v2/handlers/app_engine.py | 10 ++++++- .../logging_v2/handlers/container_engine.py | 8 ++++++ google/cloud/logging_v2/handlers/handlers.py | 27 ++++++++++--------- tests/environment | 2 +- tests/unit/test_client.py | 8 +++--- 7 files changed, 44 insertions(+), 36 deletions(-) diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index e9b432eb2..c9bbe1fe0 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -35,8 +35,6 @@ from google.cloud.logging_v2._http import _MetricsAPI as JSONMetricsAPI from google.cloud.logging_v2._http import _SinksAPI as JSONSinksAPI from google.cloud.logging_v2.handlers import CloudLoggingHandler -from google.cloud.logging_v2.handlers import AppEngineHandler -from google.cloud.logging_v2.handlers import ContainerEngineHandler from google.cloud.logging_v2.handlers import StructuredLogHandler from google.cloud.logging_v2.handlers import setup_logging from google.cloud.logging_v2.handlers.handlers import EXCLUDED_LOGGER_DEFAULTS @@ -352,7 +350,7 @@ def get_default_handler(self, **kw): if isinstance(monitored_resource, Resource): if monitored_resource.type == _GAE_RESOURCE_TYPE: - return AppEngineHandler(self, **kw) + return CloudLoggingHandler(self, resource=monitored_resource, **kw) elif monitored_resource.type == _GKE_RESOURCE_TYPE: return ContainerEngineHandler(**kw) elif monitored_resource.type == _GCF_RESOURCE_TYPE: diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py index 931b7a2f5..f5dfb7c55 100644 --- a/google/cloud/logging_v2/handlers/_helpers.py +++ b/google/cloud/logging_v2/handlers/_helpers.py @@ -17,6 +17,7 @@ import math import json import re +import warnings try: import flask @@ -39,6 +40,8 @@ def format_stackdriver_json(record, message): Returns: str: JSON str to be written to the log file. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. 
""" subsecond, second = math.modf(record.created) @@ -48,7 +51,10 @@ def format_stackdriver_json(record, message): "thread": record.thread, "severity": record.levelname, } - + warnings.warn( + "format_stackdriver_json is deprecated. Use StructuredLogHandler instead.", + DeprecationWarning, + ) return json.dumps(payload, ensure_ascii=False) @@ -68,10 +74,7 @@ def get_request_data_from_flask(): http_request = { "requestMethod": flask.request.method, "requestUrl": flask.request.url, - "requestSize": flask.request.content_length, "userAgent": flask.request.user_agent.string, - "remoteIp": flask.request.remote_addr, - "referer": flask.request.referrer, "protocol": flask.request.environ.get(_PROTOCOL_HEADER), } @@ -96,21 +99,11 @@ def get_request_data_from_django(): if request is None: return None, None, None - # convert content_length to int if it exists - content_length = None - try: - content_length = int(request.META.get(_DJANGO_CONTENT_LENGTH)) - except (ValueError, TypeError): - content_length = None - # build http_request http_request = { "requestMethod": request.method, "requestUrl": request.build_absolute_uri(), - "requestSize": content_length, "userAgent": request.META.get(_DJANGO_USERAGENT_HEADER), - "remoteIp": request.META.get(_DJANGO_REMOTE_ADDR_HEADER), - "referer": request.META.get(_DJANGO_REFERER_HEADER), "protocol": request.META.get(_PROTOCOL_HEADER), } diff --git a/google/cloud/logging_v2/handlers/app_engine.py b/google/cloud/logging_v2/handlers/app_engine.py index 874a9d608..abd16664f 100644 --- a/google/cloud/logging_v2/handlers/app_engine.py +++ b/google/cloud/logging_v2/handlers/app_engine.py @@ -20,6 +20,7 @@ import logging import os +import warnings from google.cloud.logging_v2.handlers._helpers import get_request_data from google.cloud.logging_v2.handlers._monitored_resources import ( @@ -36,9 +37,14 @@ _TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" +_DEPRECATION_MSG = "AppEngineHandler is deprecated. Use CloudLoggingHandler instead." + class AppEngineHandler(logging.StreamHandler): - """A logging handler that sends App Engine-formatted logs to Stackdriver.""" + """A logging handler that sends App Engine-formatted logs to Stackdriver. + + DEPRECATED: use CloudLoggingHandler instead. + """ def __init__( self, @@ -71,6 +77,8 @@ def __init__( self.version_id = os.environ.get(_GAE_VERSION_ENV, "") self.resource = self.get_gae_resource() + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) + def get_gae_resource(self): """Return the GAE resource using the environment variables. diff --git a/google/cloud/logging_v2/handlers/container_engine.py b/google/cloud/logging_v2/handlers/container_engine.py index a4bd0f848..3842111b4 100644 --- a/google/cloud/logging_v2/handlers/container_engine.py +++ b/google/cloud/logging_v2/handlers/container_engine.py @@ -20,15 +20,22 @@ """ import logging.handlers +import warnings from google.cloud.logging_v2.handlers._helpers import format_stackdriver_json +_DEPRECATION_MSG = ( + "ContainerEngineHandler is deprecated. Use StructuredLogHandler instead." +) + class ContainerEngineHandler(logging.StreamHandler): """Handler to format log messages the format expected by GKE fluent. This handler is written to format messages for the Google Container Engine (GKE) fluentd plugin, so that metadata such as log level are properly set. + + DEPRECATED: use StructuredLogHandler to write formatted logs to standard out instead. 
""" def __init__(self, *, name=None, stream=None): @@ -40,6 +47,7 @@ def __init__(self, *, name=None, stream=None): """ super(ContainerEngineHandler, self).__init__(stream=stream) self.name = name + warnings.warn(_DEPRECATION_MSG, DeprecationWarning) def format(self, record): """Format the message into JSON expected by fluentd. diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index b3b787fe2..46922d54f 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -33,8 +33,15 @@ "werkzeug", ) +"""These environments require us to remove extra handlers on setup""" _CLEAR_HANDLER_RESOURCE_TYPES = ("gae_app", "cloud_function") +"""Extra trace label to be added on App Engine environments""" +_GAE_TRACE_ID_LABEL = "appengine.googleapis.com/trace_id" + +"""Resource name for App Engine environments""" +_GAE_RESOURCE_TYPE = "gae_app" + class CloudLoggingFilter(logging.Filter): """Python standard ``logging`` Filter class to add Cloud Logging @@ -45,10 +52,6 @@ class CloudLoggingFilter(logging.Filter): overwritten using the `extras` argument when writing logs. """ - # The subset of http_request fields have been tested to work consistently across GCP environments - # https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#httprequest - _supported_http_fields = ("requestMethod", "requestUrl", "userAgent", "protocol") - def __init__(self, project=None, default_labels=None): self.project = project self.default_labels = default_labels if default_labels else {} @@ -80,13 +83,6 @@ def filter(self, record): user_labels = getattr(record, "labels", {}) # infer request data from the environment inferred_http, inferred_trace, inferred_span = get_request_data() - if inferred_http is not None: - # filter inferred_http to include only well-supported fields - inferred_http = { - k: v - for (k, v) in inferred_http.items() - if k in self._supported_http_fields and v is not None - } if inferred_trace is not None and self.project is not None: # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" @@ -188,12 +184,17 @@ def emit(self, record): record (logging.LogRecord): The record to be logged. 
""" message = super(CloudLoggingHandler, self).format(record) + labels = record._labels + resource = record._resource or self.resource + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: + # add GAE-specific label + labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} # send off request self.transport.send( record, message, - resource=(record._resource or self.resource), - labels=record._labels, + resource=resource, + labels=labels, trace=record._trace, span_id=record._span_id, http_request=record._http_request, diff --git a/tests/environment b/tests/environment index dc8506605..41c32ce34 160000 --- a/tests/environment +++ b/tests/environment @@ -1 +1 @@ -Subproject commit dc85066053b8dc2246c8b72f93a5b97f92885eb2 +Subproject commit 41c32ce3425529680e32701549d3f682f9c82b63 diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 9dbfa87fd..11ccd7e37 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -719,7 +719,7 @@ def test_get_default_handler_app_engine(self): import os from google.cloud._testing import _Monkey from google.cloud.logging_v2.handlers._monitored_resources import _GAE_ENV_VARS - from google.cloud.logging.handlers import AppEngineHandler + from google.cloud.logging.handlers import CloudLoggingHandler credentials = _make_credentials() client = self._make_one( @@ -733,10 +733,10 @@ def test_get_default_handler_app_engine(self): handler.transport.worker.stop() - self.assertIsInstance(handler, AppEngineHandler) + self.assertIsInstance(handler, CloudLoggingHandler) def test_get_default_handler_container_engine(self): - from google.cloud.logging.handlers import ContainerEngineHandler + from google.cloud.logging.handlers import StructuredLogHandler credentials = _make_credentials() client = self._make_one( @@ -751,7 +751,7 @@ def test_get_default_handler_container_engine(self): with patch: handler = client.get_default_handler() - self.assertIsInstance(handler, ContainerEngineHandler) + self.assertIsInstance(handler, StructuredLogHandler) def test_get_default_handler_general(self): import io From 5267152574b2ee96eb6f5c536a762f58bd2f886e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:46:47 -0800 Subject: [PATCH 23/34] feat!: support json logs (#316) --- .../handlers/_monitored_resources.py | 2 + google/cloud/logging_v2/handlers/handlers.py | 27 ++- .../logging_v2/handlers/structured_log.py | 31 ++- .../handlers/transports/background_thread.py | 14 +- .../logging_v2/handlers/transports/base.py | 2 +- .../logging_v2/handlers/transports/sync.py | 16 +- google/cloud/logging_v2/logger.py | 45 +++++ tests/system/test_system.py | 100 +++++---- tests/unit/handlers/test_handlers.py | 34 +++- tests/unit/handlers/test_structured_log.py | 5 +- .../transports/test_background_thread.py | 22 +- tests/unit/handlers/transports/test_sync.py | 39 +++- tests/unit/test_logger.py | 191 ++++++++++++++++++ 13 files changed, 437 insertions(+), 91 deletions(-) diff --git a/google/cloud/logging_v2/handlers/_monitored_resources.py b/google/cloud/logging_v2/handlers/_monitored_resources.py index e257f08e4..144258749 100644 --- a/google/cloud/logging_v2/handlers/_monitored_resources.py +++ b/google/cloud/logging_v2/handlers/_monitored_resources.py @@ -169,6 +169,8 @@ def _create_global_resource(project): def detect_resource(project=""): """Return the default monitored resource based on the local environment. + If GCP resource not found, defaults to `global`. 
+ Args: project (str): The project ID to pass on to the resource (if needed) Returns: diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 46922d54f..5d16e74b5 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -14,6 +14,7 @@ """Python :mod:`logging` handlers for Cloud Logging.""" +import collections import json import logging @@ -92,15 +93,19 @@ def filter(self, record): record._span_id = getattr(record, "span_id", inferred_span) or None record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) - record._labels = {**self.default_labels, **user_labels} or None + # add logger name as a label if possible + logger_label = {"python_logger": record.name} if record.name else {} + record._labels = {**logger_label, **self.default_labels, **user_labels} or None # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" - record._http_request_str = json.dumps(record._http_request or {}) - record._source_location_str = json.dumps(record._source_location or {}) - record._labels_str = json.dumps(record._labels or {}) - # break quotes for parsing through structured logging - record._msg_str = str(record.msg).replace('"', '\\"') if record.msg else "" + record._http_request_str = json.dumps( + record._http_request or {}, ensure_ascii=False + ) + record._source_location_str = json.dumps( + record._source_location or {}, ensure_ascii=False + ) + record._labels_str = json.dumps(record._labels or {}, ensure_ascii=False) return True @@ -183,9 +188,15 @@ def emit(self, record): Args: record (logging.LogRecord): The record to be logged. """ - message = super(CloudLoggingHandler, self).format(record) - labels = record._labels resource = record._resource or self.resource + labels = record._labels + message = None + if isinstance(record.msg, collections.abc.Mapping): + # if input is a dictionary, pass as-is for structured logging + message = record.msg + elif record.msg: + # otherwise, format message string based on superclass + message = super(CloudLoggingHandler, self).format(record) if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: # add GAE-specific label labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} diff --git a/google/cloud/logging_v2/handlers/structured_log.py b/google/cloud/logging_v2/handlers/structured_log.py index 43e1250a3..2d7c5e078 100644 --- a/google/cloud/logging_v2/handlers/structured_log.py +++ b/google/cloud/logging_v2/handlers/structured_log.py @@ -14,19 +14,23 @@ """Logging handler for printing formatted structured logs to standard output. 
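[Editor's note] In practice the change means a standard logging call can now carry a dict end to end. A minimal sketch, assuming one of this library's handlers is attached to the logger:

    import logging

    logger = logging.getLogger("my-app")  # the name becomes the python_logger label

    # A mapping is passed through as a structured payload; a plain string is
    # formatted by the handler's formatter exactly as before.
    logger.warning({"event": "checkout", "user_id": 42})
    logger.warning("plain text message")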
""" +import collections import json import logging.handlers from google.cloud.logging_v2.handlers.handlers import CloudLoggingFilter +from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message GCP_FORMAT = ( - '{"message": %(_formatted_msg)s, ' + "{%(_payload_str)s" '"severity": "%(levelname)s", ' '"logging.googleapis.com/labels": %(_labels_str)s, ' '"logging.googleapis.com/trace": "%(_trace_str)s", ' '"logging.googleapis.com/spanId": "%(_span_id_str)s", ' + '"logging.googleapis.com/trace_sampled": %(_trace_sampled_str)s, ' '"logging.googleapis.com/sourceLocation": %(_source_location_str)s, ' - '"httpRequest": %(_http_request_str)s }' + '"httpRequest": %(_http_request_str)s ' + "}" ) @@ -57,15 +61,22 @@ def format(self, record): Args: record (logging.LogRecord): The log record. Returns: - str: A JSON string formatted for GKE fluentd. + str: A JSON string formatted for GCP structured logging. """ - # let other formatters alter the message - super_payload = None - if record.msg: - # format the message using default handler behaviors - super_payload = super(StructuredLogHandler, self).format(record) - # properly break any formatting in string to make it json safe - record._formatted_msg = json.dumps(super_payload or "") + payload = None + message = _format_and_parse_message(record, super(StructuredLogHandler, self)) + + if isinstance(message, collections.abc.Mapping): + # if input is a dictionary, encode it as a json string + encoded_msg = json.dumps(message, ensure_ascii=False) + # strip out open and close parentheses + payload = encoded_msg.lstrip("{").rstrip("}") + "," + elif message: + # properly break any formatting in string to make it json safe + encoded_message = json.dumps(message, ensure_ascii=False) + payload = '"message": {},'.format(encoded_message) + + record._payload_str = payload or "" # remove exception info to avoid duplicating it # https://github.com/googleapis/python-logging/issues/382 record.exc_info = None diff --git a/google/cloud/logging_v2/handlers/transports/background_thread.py b/google/cloud/logging_v2/handlers/transports/background_thread.py index 60828a117..1097830a8 100644 --- a/google/cloud/logging_v2/handlers/transports/background_thread.py +++ b/google/cloud/logging_v2/handlers/transports/background_thread.py @@ -137,7 +137,7 @@ def _thread_main(self): if item is _WORKER_TERMINATOR: done = True # Continue processing items. else: - batch.log_struct(**item) + batch.log(**item) self._safely_commit_batch(batch) @@ -226,12 +226,18 @@ def enqueue(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. - message (str): The message from the ``LogRecord`` after being + message (str or dict): The message from the ``LogRecord`` after being formatted by the associated log formatters. kwargs: Additional optional arguments for the logger """ + # set python logger name as label if missing + labels = kwargs.pop("labels", {}) + if record.name: + labels["python_logger"] = labels.get("python_logger", record.name) + kwargs["labels"] = labels + # enqueue new entry queue_entry = { - "info": {"message": message, "python_logger": record.name}, + "message": message, "severity": _helpers._normalize_severity(record.levelno), "timestamp": datetime.datetime.utcfromtimestamp(record.created), } @@ -285,7 +291,7 @@ def send(self, record, message, **kwargs): Args: record (logging.LogRecord): Python log record that the handler was called with. 
-            message (str): The message from the ``LogRecord`` after being
+            message (str or dict): The message from the ``LogRecord`` after being
                 formatted by the associated log formatters.
             kwargs: Additional optional arguments for the logger
         """
diff --git a/google/cloud/logging_v2/handlers/transports/base.py b/google/cloud/logging_v2/handlers/transports/base.py
index d60a5a070..bd52b4e75 100644
--- a/google/cloud/logging_v2/handlers/transports/base.py
+++ b/google/cloud/logging_v2/handlers/transports/base.py
@@ -27,7 +27,7 @@ def send(self, record, message, **kwargs):
         Args:
             record (logging.LogRecord): Python log record that the
                 handler was called with.
-            message (str): The message from the ``LogRecord`` after being
+            message (str or dict): The message from the ``LogRecord`` after being
                 formatted by the associated log formatters.
             kwargs: Additional optional arguments for the logger
         """
diff --git a/google/cloud/logging_v2/handlers/transports/sync.py b/google/cloud/logging_v2/handlers/transports/sync.py
index 35ee73daa..796f0d2ff 100644
--- a/google/cloud/logging_v2/handlers/transports/sync.py
+++ b/google/cloud/logging_v2/handlers/transports/sync.py
@@ -16,7 +16,6 @@
 Logs directly to the the Cloud Logging API with a synchronous call.
 """
-
 from google.cloud.logging_v2 import _helpers
 from google.cloud.logging_v2.handlers.transports.base import Transport
 
@@ -36,11 +35,18 @@ def send(self, record, message, **kwargs):
         Args:
             record (logging.LogRecord): Python log record that the
                 handler was called with.
-            message (str): The message from the ``LogRecord`` after being
+            message (str or dict): The message from the ``LogRecord`` after being
                 formatted by the associated log formatters.
             kwargs: Additional optional arguments for the logger
         """
-        info = {"message": message, "python_logger": record.name}
-        self.logger.log_struct(
-            info, severity=_helpers._normalize_severity(record.levelno), **kwargs,
+        # set python logger name as label if missing
+        labels = kwargs.pop("labels", {})
+        if record.name:
+            labels["python_logger"] = labels.get("python_logger", record.name)
+        # send log synchronously
+        self.logger.log(
+            message,
+            severity=_helpers._normalize_severity(record.levelno),
+            labels=labels,
+            **kwargs,
         )
diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py
index fafb70629..ffe7ea706 100644
--- a/google/cloud/logging_v2/logger.py
+++ b/google/cloud/logging_v2/logger.py
@@ -14,6 +14,8 @@
 """Define API Loggers."""
 
+import collections
+
 from google.cloud.logging_v2._helpers import _add_defaults_to_filter
 from google.cloud.logging_v2.entries import LogEntry
 from google.cloud.logging_v2.entries import ProtobufEntry
@@ -21,6 +23,7 @@
 from google.cloud.logging_v2.entries import TextEntry
 from google.cloud.logging_v2.resource import Resource
 
+import google.protobuf.message
 
 _GLOBAL_RESOURCE = Resource(type="global", labels={})
 
@@ -197,6 +200,30 @@ def log_proto(self, message, *, client=None, **kw):
         """
         self._do_log(client, ProtobufEntry, message, **kw)
 
+    def log(self, message=None, *, client=None, **kw):
+        """Log an arbitrary message via a POST request.
+        Type will be inferred based on the input message.
+
+        See
+        https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list
+
+        Args:
+            message (Optional[str or dict or google.protobuf.Message]): The message to log
+            client (Optional[~logging_v2.client.Client]):
+                The client to use. If not passed, falls back to the
+                ``client`` stored on the current logger.
+            kw (Optional[dict]): additional keyword arguments for the entry.
+                See :class:`~logging_v2.entries.LogEntry`.
+        """
+        entry_type = LogEntry
+        if isinstance(message, google.protobuf.message.Message):
+            entry_type = ProtobufEntry
+        elif isinstance(message, collections.abc.Mapping):
+            entry_type = StructEntry
+        elif isinstance(message, str):
+            entry_type = TextEntry
+        self._do_log(client, entry_type, message, **kw)
+
     def delete(self, logger_name=None, *, client=None):
         """Delete all entries in a logger via a DELETE request
 
@@ -361,6 +388,24 @@ def log_proto(self, message, **kw):
         """
         self.entries.append(ProtobufEntry(payload=message, **kw))
 
+    def log(self, message=None, **kw):
+        """Add an arbitrary message to be logged during :meth:`commit`.
+        Type will be inferred based on the input message.
+
+        Args:
+            message (Optional[str or dict or google.protobuf.Message]): The message to log
+            kw (Optional[dict]): Additional keyword arguments for the entry.
+                See :class:`~logging_v2.entries.LogEntry`.
+        """
+        entry_type = LogEntry
+        if isinstance(message, google.protobuf.message.Message):
+            entry_type = ProtobufEntry
+        elif isinstance(message, collections.abc.Mapping):
+            entry_type = StructEntry
+        elif isinstance(message, str):
+            entry_type = TextEntry
+        self.entries.append(entry_type(payload=message, **kw))
+
     def commit(self, *, client=None):
         """Send saved log entries as a single API call.
 
diff --git a/tests/system/test_system.py b/tests/system/test_system.py
index 532eea96b..836339f0b 100644
--- a/tests/system/test_system.py
+++ b/tests/system/test_system.py
@@ -32,7 +32,6 @@
 from google.api_core.exceptions import ServiceUnavailable
 import google.cloud.logging
 from google.cloud._helpers import UTC
-from google.cloud.logging_v2.handlers import AppEngineHandler
 from google.cloud.logging_v2.handlers import CloudLoggingHandler
 from google.cloud.logging_v2.handlers.transports import SyncTransport
 from google.cloud.logging_v2 import client
@@ -401,6 +400,35 @@ def test_log_struct_w_metadata(self):
         self.assertEqual(request["requestUrl"], URI)
         self.assertEqual(request["status"], STATUS)
 
+    def test_log_w_text(self):
+        TEXT_PAYLOAD = "System test: test_log_w_text"
+        logger = Config.CLIENT.logger(self._logger_name("log_w_text"))
+        self.to_delete.append(logger)
+        logger.log(TEXT_PAYLOAD)
+        entries = _list_entries(logger)
+        self.assertEqual(len(entries), 1)
+        self.assertEqual(entries[0].payload, TEXT_PAYLOAD)
+
+    def test_log_w_struct(self):
+        logger = Config.CLIENT.logger(self._logger_name("log_w_struct"))
+        self.to_delete.append(logger)
+
+        logger.log(self.JSON_PAYLOAD)
+        entries = _list_entries(logger)
+
+        self.assertEqual(len(entries), 1)
+        self.assertEqual(entries[0].payload, self.JSON_PAYLOAD)
+
+    def test_log_empty(self):
+        logger = Config.CLIENT.logger(self._logger_name("log_empty"))
+        self.to_delete.append(logger)
+
+        logger.log()
+        entries = _list_entries(logger)
+
+        self.assertEqual(len(entries), 1)
+        self.assertIsNone(entries[0].payload)
+
     def test_log_handler_async(self):
         LOG_MESSAGE = "It was the worst of times"
 
@@ -415,7 +443,7 @@ def test_log_handler_async(self):
         cloud_logger.warning(LOG_MESSAGE)
         handler.flush()
         entries = _list_entries(logger)
-        expected_payload = {"message": LOG_MESSAGE, "python_logger": handler.name}
+        expected_payload = LOG_MESSAGE
         self.assertEqual(len(entries), 1)
         self.assertEqual(entries[0].payload, expected_payload)
 
@@ -437,44 +465,46 @@ def test_log_handler_sync(self):
         cloud_logger.warning(LOG_MESSAGE)
 
         entries = _list_entries(logger)
-        expected_payload = {"message": LOG_MESSAGE, "python_logger": LOGGER_NAME}
+        expected_payload =
LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) def test_handlers_w_extras(self): LOG_MESSAGE = "Testing with injected extras." + LOGGER_NAME = "handler_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) - for cls in [CloudLoggingHandler, AppEngineHandler]: - LOGGER_NAME = f"{cls.__name__}-handler_extras" - handler_name = self._logger_name(LOGGER_NAME) - - handler = cls(Config.CLIENT, name=handler_name, transport=SyncTransport) - - # only create the logger to delete, hidden otherwise - logger = Config.CLIENT.logger(handler.name) - self.to_delete.append(logger) - - cloud_logger = logging.getLogger(LOGGER_NAME) - cloud_logger.addHandler(handler) - expected_request = {"requestUrl": "localhost"} - expected_source = {"file": "test.py"} - extra = { - "trace": "123", - "span_id": "456", - "http_request": expected_request, - "source_location": expected_source, - "resource": Resource(type="cloudiot_device", labels={}), - "labels": {"test-label": "manual"}, - } - cloud_logger.warning(LOG_MESSAGE, extra=extra) - - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].trace, extra["trace"]) - self.assertEqual(entries[0].span_id, extra["span_id"]) - self.assertEqual(entries[0].http_request, expected_request) - self.assertEqual(entries[0].labels, extra["labels"]) - self.assertEqual(entries[0].resource.type, extra["resource"].type) + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + expected_request = {"requestUrl": "localhost"} + expected_source = {"file": "test.py"} + extra = { + "trace": "123", + "span_id": "456", + "http_request": expected_request, + "source_location": expected_source, + "resource": Resource(type="cloudiot_device", labels={}), + "labels": {"test-label": "manual"}, + } + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].trace, extra["trace"]) + self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertEqual(entries[0].http_request, expected_request) + self.assertEqual( + entries[0].labels, {**extra["labels"], "python_logger": LOGGER_NAME} + ) + self.assertEqual(entries[0].resource.type, extra["resource"].type) def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
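The system tests above exercise the new ``Logger.log`` type-inference path end to end. For orientation, the intended call pattern looks roughly like this (a usage sketch, assuming application-default credentials are available; the log name is illustrative):

    import google.cloud.logging
    from google.protobuf.struct_pb2 import Struct, Value

    client = google.cloud.logging.Client()
    logger = client.logger("inference-demo")  # illustrative log name

    # payload type is inferred from the argument:
    logger.log("plain text")                                    # str -> TextEntry (textPayload)
    logger.log({"message": "hi", "weather": "cloudy"})          # Mapping -> StructEntry (jsonPayload)
    logger.log(Struct(fields={"foo": Value(bool_value=True)}))  # protobuf Message -> ProtobufEntry
    logger.log()                                                # anything else -> bare LogEntry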
@@ -490,7 +520,7 @@ def test_log_root_handler(self): logging.warning(LOG_MESSAGE) entries = _list_entries(logger) - expected_payload = {"message": LOG_MESSAGE, "python_logger": "root"} + expected_payload = LOG_MESSAGE self.assertEqual(len(entries), 1) self.assertEqual(entries[0].payload, expected_payload) diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index b7fef1b9e..c51175261 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -63,6 +63,7 @@ def test_filter_record(self): "file": "testpath", "function": "test-function", } + expected_label = {"python_logger": logname} record = logging.LogRecord( logname, logging.INFO, @@ -78,7 +79,6 @@ def test_filter_record(self): self.assertTrue(success) self.assertEqual(record.msg, message) - self.assertEqual(record._msg_str, message) self.assertEqual(record._source_location, expected_location) self.assertEqual(record._source_location_str, json.dumps(expected_location)) self.assertIsNone(record._resource) @@ -88,8 +88,8 @@ def test_filter_record(self): self.assertEqual(record._span_id_str, "") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") - self.assertIsNone(record._labels) - self.assertEqual(record._labels_str, "{}") + self.assertEqual(record._labels, expected_label) + self.assertEqual(record._labels_str, json.dumps(expected_label)) def test_minimal_record(self): """ @@ -105,7 +105,6 @@ def test_minimal_record(self): self.assertTrue(success) self.assertIsNone(record.msg) - self.assertEqual(record._msg_str, "") self.assertIsNone(record._source_location) self.assertEqual(record._source_location_str, "{}") self.assertIsNone(record._resource) @@ -297,7 +296,16 @@ def test_emit(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, message, _GLOBAL_RESOURCE, None, None, None, None, None), + ( + record, + message, + _GLOBAL_RESOURCE, + {"python_logger": logname}, + None, + None, + None, + None, + ), ) def test_emit_manual_field_override(self): @@ -336,6 +344,7 @@ def test_emit_manual_field_override(self): "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, } setattr(record, "labels", added_labels) handler.handle(record) @@ -368,14 +377,25 @@ def test_emit_with_custom_formatter(self): handler.setFormatter(logFormatter) message = "test" expected_result = "logname :: INFO :: test" + logname = "logname" + expected_label = {"python_logger": logname} record = logging.LogRecord( - "logname", logging.INFO, None, None, message, None, None + logname, logging.INFO, None, None, message, None, None ) handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), ) def test_format_with_arguments(self): diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 271a68189..c87f7f23e 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -60,6 +60,7 @@ def test_format(self): record = logging.LogRecord( logname, logging.INFO, pathname, lineno, message, None, None, func=func ) + expected_labels = {**labels, "python_logger": logname} expected_payload = { "message": message, "severity": record.levelname, @@ -71,7 +72,7 @@ def test_format(self): 
"function": func, }, "httpRequest": {}, - "logging.googleapis.com/labels": labels, + "logging.googleapis.com/labels": expected_labels, } handler.filter(record) result = json.loads(handler.format(record)) @@ -91,7 +92,6 @@ def test_format_minimal(self): record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) record.created = None expected_payload = { - "message": "", "logging.googleapis.com/trace": "", "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, @@ -265,6 +265,7 @@ def test_format_overrides(self): "default_key": "default-value", "overwritten_key": "new_value", "added_key": "added_value", + "python_logger": logname, }, } diff --git a/tests/unit/handlers/transports/test_background_thread.py b/tests/unit/handlers/transports/test_background_thread.py index 1666cd74b..f408de476 100644 --- a/tests/unit/handlers/transports/test_background_thread.py +++ b/tests/unit/handlers/transports/test_background_thread.py @@ -279,15 +279,14 @@ def test_enqueue_defaults(self): self._enqueue_record(worker, message) entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.INFO) self.assertIsInstance(entry["timestamp"], datetime.datetime) self.assertNotIn("resource", entry.keys()) - self.assertNotIn("labels", entry.keys()) self.assertNotIn("trace", entry.keys()) self.assertNotIn("span_id", entry.keys()) self.assertNotIn("http_request", entry.keys()) + self.assertEqual(entry["labels"], {"python_logger": "testing"}) def test_enqueue_explicit(self): import datetime @@ -313,11 +312,10 @@ def test_enqueue_explicit(self): entry = worker._queue.get_nowait() - expected_info = {"message": message, "python_logger": "testing"} - self.assertEqual(entry["info"], expected_info) + self.assertEqual(entry["message"], message) self.assertEqual(entry["severity"], LogSeverity.ERROR) self.assertIs(entry["resource"], resource) - self.assertIs(entry["labels"], labels) + self.assertEqual(entry["labels"], {**labels, "python_logger": "testing"}) self.assertIs(entry["trace"], trace) self.assertIs(entry["span_id"], span_id) self.assertIsInstance(entry["timestamp"], datetime.datetime) @@ -388,9 +386,9 @@ def test__thread_main_max_latency(self, time): worker._queue = mock.create_autospec(queue.Queue, instance=True) worker._queue.get.side_effect = [ - {"info": {"message": "1"}}, # Single record. + {"message": 1}, # Single record. queue.Empty(), # Emulate a queue.get() timeout. - {"info": {"message": "1"}}, # Second record. + {"message": "2"}, # Second record. background_thread._WORKER_TERMINATOR, # Stop the thread. queue.Empty(), # Emulate a queue.get() timeout. 
] @@ -479,9 +477,9 @@ def __init__(self): self.commit_called = False self.commit_count = None - def log_struct( + def log( self, - info, + message, severity=logging.INFO, resource=None, labels=None, @@ -495,8 +493,8 @@ def log_struct( assert resource is None resource = _GLOBAL_RESOURCE - self.log_struct_called_with = (info, severity, resource, labels, trace, span_id) - self.entries.append(info) + self.log_called_with = (message, severity, resource, labels, trace, span_id) + self.entries.append(message) def commit(self): self.commit_called = True diff --git a/tests/unit/handlers/transports/test_sync.py b/tests/unit/handlers/transports/test_sync.py index 9f0642757..cc8ffe284 100644 --- a/tests/unit/handlers/transports/test_sync.py +++ b/tests/unit/handlers/transports/test_sync.py @@ -41,26 +41,51 @@ def test_send(self): client = _Client(self.PROJECT) - stackdriver_logger_name = "python" + client_name = "python" python_logger_name = "mylogger" - transport = self._make_one(client, stackdriver_logger_name) + transport = self._make_one(client, client_name) message = "hello world" record = logging.LogRecord( python_logger_name, logging.INFO, None, None, message, None, None ) transport.send(record, message, resource=_GLOBAL_RESOURCE) - EXPECTED_STRUCT = {"message": message, "python_logger": python_logger_name} EXPECTED_SENT = ( - EXPECTED_STRUCT, + message, LogSeverity.INFO, _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, + None, + None, None, + ) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) + + def test_send_struct(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2._helpers import LogSeverity + + client = _Client(self.PROJECT) + + client_name = "python" + python_logger_name = "mylogger" + transport = self._make_one(client, client_name) + message = {"message": "hello world", "extra": "test"} + record = logging.LogRecord( + python_logger_name, logging.INFO, None, None, message, None, None + ) + + transport.send(record, message, resource=_GLOBAL_RESOURCE) + EXPECTED_SENT = ( + message, + LogSeverity.INFO, + _GLOBAL_RESOURCE, + {"python_logger": python_logger_name}, None, None, None, ) - self.assertEqual(transport.logger.log_struct_called_with, EXPECTED_SENT) + self.assertEqual(transport.logger.log_called_with, EXPECTED_SENT) class _Logger(object): @@ -69,7 +94,7 @@ class _Logger(object): def __init__(self, name): self.name = name - def log_struct( + def log( self, message, severity=None, @@ -79,7 +104,7 @@ def log_struct( span_id=None, http_request=None, ): - self.log_struct_called_with = ( + self.log_called_with = ( message, severity, resource, diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 5ad486178..d0e751e93 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -464,6 +464,80 @@ def test_log_proto_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_inference_empty(self): + DEFAULT_LABELS = {"foo": "spam"} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "resource": {"type": "global", "labels": {}}, + "labels": DEFAULT_LABELS, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client, labels=DEFAULT_LABELS) + + logger.log() + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_inference_text(self): + RESOURCE = {"type": "global", "labels": 
{}}
+        TEXT = "TEXT"
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "textPayload": TEXT,
+                "resource": RESOURCE,
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(TEXT)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_inference_struct(self):
+        STRUCT = {"message": "MESSAGE", "weather": "cloudy"}
+        RESOURCE = {"type": "global", "labels": {}}
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "jsonPayload": STRUCT,
+                "resource": RESOURCE,
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(STRUCT)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
+    def test_log_inference_proto(self):
+        import json
+        from google.protobuf.json_format import MessageToJson
+        from google.protobuf.struct_pb2 import Struct, Value
+
+        message = Struct(fields={"foo": Value(bool_value=True)})
+        ENTRIES = [
+            {
+                "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME),
+                "protoPayload": json.loads(MessageToJson(message)),
+                "resource": {"type": "global", "labels": {}},
+            }
+        ]
+        client = _Client(self.PROJECT)
+        api = client.logging_api = _DummyLoggingAPI()
+        logger = self._make_one(self.LOGGER_NAME, client=client)
+
+        logger.log(message)
+
+        self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None))
+
     def test_delete_w_bound_client(self):
         client = _Client(project=self.PROJECT)
         api = client.logging_api = _DummyLoggingAPI()
@@ -902,6 +976,123 @@ def test_log_proto_explicit(self):
         )
         self.assertEqual(batch.entries, [ENTRY])
 
+    def test_log_inference_empty(self):
+        """
+        When calling batch.log with empty input, it should
+        call batch.log_empty
+        """
+        from google.cloud.logging import LogEntry
+
+        ENTRY = LogEntry()
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log()
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_text(self):
+        """
+        When calling batch.log with text input, it should
+        call batch.log_text
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from google.cloud.logging import TextEntry
+
+        TEXT = "This is the entry text"
+        ENTRY = TextEntry(payload=TEXT, resource=_GLOBAL_RESOURCE)
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(TEXT)
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_struct(self):
+        """
+        When calling batch.log with struct input, it should
+        call batch.log_struct
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from google.cloud.logging import StructEntry
+
+        STRUCT = {"message": "Message text", "weather": "partly cloudy"}
+        ENTRY = StructEntry(payload=STRUCT, resource=_GLOBAL_RESOURCE)
+        client = _Client(project=self.PROJECT, connection=_make_credentials())
+        logger = _Logger()
+        batch = self._make_one(logger, client=client)
+        batch.log(STRUCT)
+        self.assertEqual(batch.entries, [ENTRY])
+
+    def test_log_inference_proto(self):
+        """
+        When calling batch.log with proto input, it should
+        call batch.log_proto
+        """
+        from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE
+        from
google.cloud.logging import ProtobufEntry + from google.protobuf.struct_pb2 import Struct + from google.protobuf.struct_pb2 import Value + + message = Struct(fields={"foo": Value(bool_value=True)}) + ENTRY = ProtobufEntry(payload=message, resource=_GLOBAL_RESOURCE) + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log(message) + self.assertEqual(batch.entries, [ENTRY]) + + def test_log_inference_struct_explicit(self): + """ + When calling batch.log with struct input, it should + call batch.log_struct, along with input arguments + """ + import datetime + from google.cloud.logging import Resource + from google.cloud.logging import StructEntry + + STRUCT = {"message": "Message text", "weather": "partly cloudy"} + LABELS = {"foo": "bar", "baz": "qux"} + IID = "IID" + SEVERITY = "CRITICAL" + METHOD = "POST" + URI = "https://api.example.com/endpoint" + STATUS = "500" + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} + TIMESTAMP = datetime.datetime(2016, 12, 31, 0, 1, 2, 999999) + RESOURCE = Resource( + type="gae_app", labels={"module_id": "default", "version_id": "test"} + ) + ENTRY = StructEntry( + payload=STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + + client = _Client(project=self.PROJECT, connection=_make_credentials()) + logger = _Logger() + batch = self._make_one(logger, client=client) + batch.log( + STRUCT, + labels=LABELS, + insert_id=IID, + severity=SEVERITY, + http_request=REQUEST, + timestamp=TIMESTAMP, + resource=RESOURCE, + trace=TRACE, + span_id=SPANID, + trace_sampled=True, + ) + self.assertEqual(batch.entries, [ENTRY]) + def test_commit_w_unknown_entry_type(self): from google.cloud.logging_v2.entries import _GLOBAL_RESOURCE from google.cloud.logging import LogEntry From c63250399fcd6e1317d341e98fab11095c443e5e Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 16:48:15 -0800 Subject: [PATCH 24/34] feat!: Infer default resource in logger (#315) --- .github/.OwlBot.lock.yaml | 1 + google/cloud/logging_v2/client.py | 9 ++- google/cloud/logging_v2/handlers/handlers.py | 8 ++- google/cloud/logging_v2/logger.py | 11 +++- tests/unit/handlers/test_handlers.py | 7 ++- tests/unit/test_client.py | 8 ++- tests/unit/test_logger.py | 65 ++++++++++++++++---- 7 files changed, 85 insertions(+), 24 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 8cb43804d..fa15cb546 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -14,3 +14,4 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index c9bbe1fe0..5792ff6f9 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -182,16 +182,21 @@ def metrics_api(self): self._metrics_api = JSONMetricsAPI(self) return self._metrics_api - def logger(self, name): + def logger(self, name, *, labels=None, resource=None): """Creates a logger bound to the current client. Args: name (str): The name of the logger to be constructed. 
+ resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. + labels (Optional[dict]): Mapping of default labels for entries written + via this logger. Returns: ~logging_v2.logger.Logger: Logger created with the current client. """ - return Logger(name, client=self) + return Logger(name, client=self, labels=labels, resource=resource) def list_entries( self, diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 5d16e74b5..b554a6fdb 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -18,7 +18,6 @@ import json import logging -from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.handlers.transports import BackgroundThreadTransport from google.cloud.logging_v2.handlers._monitored_resources import detect_resource from google.cloud.logging_v2.handlers._helpers import get_request_data @@ -144,7 +143,7 @@ def __init__( *, name=DEFAULT_LOGGER_NAME, transport=BackgroundThreadTransport, - resource=_GLOBAL_RESOURCE, + resource=None, labels=None, stream=None, ): @@ -163,11 +162,14 @@ def __init__( :class:`.BackgroundThreadTransport`. The other option is :class:`.SyncTransport`. resource (~logging_v2.resource.Resource): - Resource for this Handler. Defaults to ``global``. + Resource for this Handler. If not given, will be inferred from the environment. labels (Optional[dict]): Additional labels to attach to logs. stream (Optional[IO]): Stream to be used by the handler. """ super(CloudLoggingHandler, self).__init__(stream) + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self.client = client self.transport = transport(client, name) diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index ffe7ea706..01221fc7b 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -22,6 +22,7 @@ from google.cloud.logging_v2.entries import StructEntry from google.cloud.logging_v2.entries import TextEntry from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.handlers._monitored_resources import detect_resource import google.protobuf.message @@ -51,19 +52,23 @@ class Logger(object): See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.logs """ - def __init__(self, name, client, *, labels=None, resource=_GLOBAL_RESOURCE): + def __init__(self, name, client, *, labels=None, resource=None): """ Args: name (str): The name of the logger. client (~logging_v2.client.Client): A client which holds credentials and project configuration for the logger (which requires a project). - resource (~logging_v2.Resource): a monitored resource object - representing the resource the code was run on. + resource (Optional[~logging_v2.Resource]): a monitored resource object + representing the resource the code was run on. If not given, will + be inferred from the environment. labels (Optional[dict]): Mapping of default labels for entries written via this logger. 
""" + if not resource: + # infer the correct monitored resource from the local environment + resource = detect_resource(client.project) self.name = name self._client = client self.labels = labels diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index c51175261..74f5c6dd8 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -236,7 +236,9 @@ def _make_one(self, *args, **kw): def test_ctor_defaults(self): import sys - from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + from google.cloud.logging_v2.handlers._monitored_resources import ( + _create_global_resource, + ) from google.cloud.logging_v2.handlers.handlers import DEFAULT_LOGGER_NAME patch = mock.patch( @@ -251,7 +253,8 @@ def test_ctor_defaults(self): self.assertIsInstance(handler.transport, _Transport) self.assertIs(handler.transport.client, client) self.assertEqual(handler.transport.name, DEFAULT_LOGGER_NAME) - self.assertEqual(handler.resource, _GLOBAL_RESOURCE) + global_resource = _create_global_resource(self.PROJECT) + self.assertEqual(handler.resource, global_resource) self.assertIsNone(handler.labels) self.assertIs(handler.stream, sys.stderr) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 11ccd7e37..46526fb21 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -239,14 +239,20 @@ def make_api(client_obj): def test_logger(self): from google.cloud.logging import Logger + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE creds = _make_credentials() client = self._make_one(project=self.PROJECT, credentials=creds) - logger = client.logger(self.LOGGER_NAME) + labels = {"test": "true"} + logger = client.logger( + self.LOGGER_NAME, resource=_GLOBAL_RESOURCE, labels=labels + ) self.assertIsInstance(logger, Logger) self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) + self.assertEqual(logger.default_resource, _GLOBAL_RESOURCE) + self.assertEqual(logger.labels, labels) def test_list_entries_defaults(self): from google.cloud.logging import TextEntry diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index d0e751e93..0d8fd1208 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -99,11 +99,15 @@ def test_batch_w_alternate_client(self): self.assertIs(batch.client, client2) def test_log_empty_defaults_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -170,7 +174,11 @@ def test_log_empty_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_defaults(self): - RESOURCE = {"type": "global", "labels": {}} + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + RESOURCE = detect_resource(self.PROJECT)._to_dict() TEXT = "TEXT" ENTRIES = [ { @@ -188,8 +196,12 @@ def test_log_text_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_text_w_unicode_and_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + TEXT = "TEXT" - RESOURCE = {"type": "global", "labels": 
{}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -265,8 +277,12 @@ def test_log_text_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), @@ -283,8 +299,12 @@ def test_log_struct_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_struct_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} + RESOURCE = detect_resource(self.PROJECT)._to_dict() DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { @@ -360,6 +380,9 @@ def test_log_struct_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_defaults(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -369,7 +392,7 @@ def test_log_proto_defaults(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -381,6 +404,9 @@ def test_log_proto_defaults(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_proto_w_default_labels(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value @@ -391,7 +417,7 @@ def test_log_proto_w_default_labels(self): { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -465,11 +491,15 @@ def test_log_proto_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_empty(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + DEFAULT_LABELS = {"foo": "spam"} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), "labels": DEFAULT_LABELS, } ] @@ -482,13 +512,16 @@ def test_log_inference_empty(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_text(self): - RESOURCE = {"type": "global", "labels": {}} + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + TEXT = "TEXT" ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "textPayload": TEXT, - "resource": RESOURCE, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ 
-500,13 +533,16 @@ def test_log_inference_text(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) def test_log_inference_struct(self): + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + STRUCT = {"message": "MESSAGE", "weather": "cloudy"} - RESOURCE = {"type": "global", "labels": {}} ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "jsonPayload": STRUCT, - "resource": RESOURCE, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) @@ -521,13 +557,16 @@ def test_log_inference_proto(self): import json from google.protobuf.json_format import MessageToJson from google.protobuf.struct_pb2 import Struct, Value + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) message = Struct(fields={"foo": Value(bool_value=True)}) ENTRIES = [ { "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), "protoPayload": json.loads(MessageToJson(message)), - "resource": {"type": "global", "labels": {}}, + "resource": detect_resource(self.PROJECT)._to_dict(), } ] client = _Client(self.PROJECT) From 6fa17735fe3edb45483ec5e3abd1f53c24ffa881 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 27 Jul 2021 11:20:31 -0700 Subject: [PATCH 25/34] feat!: support string-encoded json (#339) --- google/cloud/logging_v2/handlers/handlers.py | 38 +++- tests/unit/handlers/test_handlers.py | 184 +++++++++++++++++++ tests/unit/handlers/test_structured_log.py | 41 +++++ 3 files changed, 256 insertions(+), 7 deletions(-) diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index b554a6fdb..8d14852e1 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -192,13 +192,8 @@ def emit(self, record): """ resource = record._resource or self.resource labels = record._labels - message = None - if isinstance(record.msg, collections.abc.Mapping): - # if input is a dictionary, pass as-is for structured logging - message = record.msg - elif record.msg: - # otherwise, format message string based on superclass - message = super(CloudLoggingHandler, self).format(record) + message = _format_and_parse_message(record, self) + if resource.type == _GAE_RESOURCE_TYPE and record._trace is not None: # add GAE-specific label labels = {_GAE_TRACE_ID_LABEL: record._trace, **(labels or {})} @@ -215,6 +210,35 @@ def emit(self, record): ) +def _format_and_parse_message(record, formatter_handler): + """ + Helper function to apply formatting to a LogRecord message, + and attempt to parse encoded JSON into a dictionary object. 
+ + Resulting output will be of type (str | dict | None) + + Args: + record (logging.LogRecord): The record object representing the log + formatter_handler (logging.Handler): The handler used to format the log + """ + # if message is a dictionary, return as-is + if isinstance(record.msg, collections.abc.Mapping): + return record.msg + # format message string based on superclass + message = formatter_handler.format(record) + try: + # attempt to parse encoded json into dictionary + if message[0] == "{": + json_message = json.loads(message) + if isinstance(json_message, collections.abc.Mapping): + message = json_message + except (json.decoder.JSONDecodeError, IndexError): + # log string is not valid json + pass + # if formatted message contains no content, return None + return message if message != "None" else None + + def setup_logging( handler, *, excluded_loggers=EXCLUDED_LOGGER_DEFAULTS, log_level=logging.INFO ): diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 74f5c6dd8..d36dc8959 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -311,6 +311,20 @@ def test_emit(self): ), ) + def test_emit_minimal(self): + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE + ) + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None) + handler.handle(record) + self.assertEqual( + handler.transport.send_called_with, + (record, None, _GLOBAL_RESOURCE, None, None, None, None, None,), + ) + def test_emit_manual_field_override(self): from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE from google.cloud.logging_v2.resource import Resource @@ -401,6 +415,70 @@ def test_emit_with_custom_formatter(self): ), ) + def test_emit_dict(self): + """ + Handler should support logging dictionaries + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + message = {"x": "test"} + logname = "logname" + expected_label = {"python_logger": logname} + record = logging.LogRecord( + logname, logging.INFO, None, None, message, None, None + ) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + message, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), + ) + + def test_emit_with_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE + + client = _Client(self.PROJECT) + handler = self._make_one( + client, transport=_Transport, resource=_GLOBAL_RESOURCE, + ) + logFormatter = logging.Formatter(fmt='{ "x" : "%(name)s" }') + handler.setFormatter(logFormatter) + logname = "logname" + expected_result = {"x": logname} + expected_label = {"python_logger": logname} + record = logging.LogRecord(logname, logging.INFO, None, None, None, None, None) + handler.handle(record) + + self.assertEqual( + handler.transport.send_called_with, + ( + record, + expected_result, + _GLOBAL_RESOURCE, + expected_label, + None, + None, + None, + None, + ), + ) + def test_format_with_arguments(self): """ Handler should support format string arguments @@ -425,6 +503,112 @@ def test_format_with_arguments(self): ) +class TestFormatAndParseMessage(unittest.TestCase): + def test_none(self): + """ + None messages with no special 
formatting should return
+        None after formatting
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = None
+        record = logging.LogRecord(None, None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, None)
+
+    def test_none_formatted(self):
+        """
+        None messages with formatting rules should return formatted string
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = None
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        formatter = logging.Formatter("name: %(name)s")
+        handler.setFormatter(formatter)
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, "name: logname")
+
+    def test_unformatted_string(self):
+        """
+        Unformatted strings should be returned unchanged
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = '"test"'
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, message)
+
+    def test_empty_string(self):
+        """
+        Empty strings should be returned unchanged
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = ""
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, message)
+
+    def test_string_formatted_with_args(self):
+        """
+        string messages should properly apply formatting and arguments
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = "argument: %s"
+        arg = "test"
+        record = logging.LogRecord("logname", None, None, None, message, arg, None)
+        handler = logging.StreamHandler()
+        formatter = logging.Formatter("name: %(name)s :: message: %(message)s")
+        handler.setFormatter(formatter)
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, "name: logname :: message: argument: test")
+
+    def test_dict(self):
+        """
+        dict messages should be unchanged
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = {"a": "b"}
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        formatter = logging.Formatter("name: %(name)s")
+        handler.setFormatter(formatter)
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, message)
+
+    def test_string_encoded_dict(self):
+        """
+        dicts should be extracted from string messages
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = '{ "x": { "y" : "z" } }'
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, {"x": {"y": "z"}})
+
+    def test_broken_encoded_dict(self):
+        """
+        unparseable encoded dicts should be kept as strings
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = '{ "x": { "y" : '
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record,
handler) + self.assertEqual(result, message) + + class TestSetupLogging(unittest.TestCase): def _call_fut(self, handler, excludes=None): from google.cloud.logging.handlers import setup_logging diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index c87f7f23e..d9dfa2512 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -92,13 +92,16 @@ def test_format_minimal(self): record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) record.created = None expected_payload = { + "severity": "INFO", "logging.googleapis.com/trace": "", + "logging.googleapis.com/spanId": "", "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, "logging.googleapis.com/labels": {}, } handler.filter(record) result = json.loads(handler.format(record)) + self.assertEqual(set(expected_payload.keys()), set(result.keys())) for (key, value) in expected_payload.items(): self.assertEqual( value, result[key], f"expected_payload[{key}] != result[{key}]" @@ -170,6 +173,44 @@ def test_format_with_custom_formatter(self): handler.filter(record) result = handler.format(record) self.assertIn(expected_result, result) + self.assertIn("message", result) + + def test_dict(self): + """ + Handler should parse json encoded as a string + """ + import logging + + handler = self._make_one() + message = {"x": "test"} + expected_result = '"x": "test"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, message, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) + + def test_encoded_json(self): + """ + Handler should parse json encoded as a string + """ + import logging + + handler = self._make_one() + logFormatter = logging.Formatter(fmt='{ "name" : "%(name)s" }') + handler.setFormatter(logFormatter) + expected_result = '"name": "logname"' + record = logging.LogRecord( + "logname", logging.INFO, None, None, None, None, None, + ) + record.created = None + handler.filter(record) + result = handler.format(record) + self.assertIn(expected_result, result) + self.assertNotIn("message", result) def test_format_with_arguments(self): """ From e1506fa9030776353878048ce562c53bf6ccf7bf Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 5 Oct 2021 17:04:19 -0700 Subject: [PATCH 26/34] fix!: api consistency between HTTP and Gapic layers (#375) --- google/cloud/logging_v2/_gapic.py | 109 ++++++---- google/cloud/logging_v2/_http.py | 79 +++++--- google/cloud/logging_v2/client.py | 71 ++++--- google/cloud/logging_v2/logger.py | 25 ++- tests/system/test_system.py | 319 ++++++++++++++++++------------ tests/unit/test__gapic.py | 116 ++++++++++- tests/unit/test__http.py | 136 +++++++------ tests/unit/test_client.py | 72 +++---- tests/unit/test_logger.py | 107 ++++++++-- 9 files changed, 691 insertions(+), 343 deletions(-) diff --git a/google/cloud/logging_v2/_gapic.py b/google/cloud/logging_v2/_gapic.py index 7a6d70650..3661d3d09 100644 --- a/google/cloud/logging_v2/_gapic.py +++ b/google/cloud/logging_v2/_gapic.py @@ -49,10 +49,11 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. 
Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -69,14 +70,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ # full resource names are expected by the API resource_names = resource_names @@ -89,19 +92,27 @@ def list_entries( ) response = self._gapic_api.list_log_entries(request=request) - page_iter = iter(response) + log_iter = iter(response) # We attach a mutable loggers dictionary so that as Logger # objects are created by entry_from_resource, they can be # re-used by other log entries from the same logger. loggers = {} - def log_entries_pager(page_iter): - for page in page_iter: - log_entry_dict = _parse_log_entry(LogEntryPB.pb(page)) + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + # create generator + def log_entries_pager(log_iter): + i = 0 + for entry in log_iter: + if max_results is not None and i >= max_results: + break + log_entry_dict = _parse_log_entry(LogEntryPB.pb(entry)) yield entry_from_resource(log_entry_dict, self._client, loggers=loggers) + i += 1 - return log_entries_pager(page_iter) + return log_entries_pager(log_iter) def write_entries( self, @@ -175,7 +186,7 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_sinks(self, parent, *, page_size=0, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. Args: @@ -187,27 +198,37 @@ def list_sinks(self, parent, *, page_size=0, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ request = ListSinksRequest( parent=parent, page_size=page_size, page_token=page_token ) response = self._gapic_api.list_sinks(request) - page_iter = iter(response) + sink_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def sinks_pager(page_iter): - for page in page_iter: + def sinks_pager(sink_iter): + i = 0 + for entry in sink_iter: + if max_results is not None and i >= max_results: + break # Convert the GAPIC sink type into the handwritten `Sink` type - yield Sink.from_api_repr(LogSink.to_dict(page), client=self._client) + yield Sink.from_api_repr(LogSink.to_dict(entry), client=self._client) + i += 1 - return sinks_pager(page_iter) + return sinks_pager(sink_iter) def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False @@ -347,33 +368,47 @@ def __init__(self, gapic_api, client): self._gapic_api = gapic_api self._client = client - def list_metrics(self, project, *, page_size=0, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. Args: project (str): ID of the project whose metrics are to be listed. - page_size (int): Maximum number of metrics to return, If not passed, - defaults to a value set by the API. - page_token (str): Opaque marker for the next "page" of metrics. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterable[logging_v2.Metric]: Iterable of metrics. + Generator[logging_v2.Metric] """ path = f"projects/{project}" request = ListLogMetricsRequest( parent=path, page_size=page_size, page_token=page_token, ) response = self._gapic_api.list_log_metrics(request=request) - page_iter = iter(response) + metric_iter = iter(response) + + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") - def metrics_pager(page_iter): - for page in page_iter: + def metrics_pager(metric_iter): + i = 0 + for entry in metric_iter: + if max_results is not None and i >= max_results: + break # Convert GAPIC metrics type into handwritten `Metric` type - yield Metric.from_api_repr(LogMetric.to_dict(page), client=self._client) + yield Metric.from_api_repr( + LogMetric.to_dict(entry), client=self._client + ) + i += 1 - return metrics_pager(page_iter) + return metrics_pager(metric_iter) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. 
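All three GAPIC list methods above wrap their response iterator in the same counting generator. Distilled into a standalone helper, the pattern looks like this (a sketch with illustrative names, not code from the patch). Note the guard only rejects negative values, so ``max_results=0`` yields nothing rather than raising:

    def capped_pager(source, max_results=None):
        # yield items from source, stopping after max_results items
        if max_results is not None and max_results < 0:
            raise ValueError("max_results must be positive")
        count = 0
        for item in source:
            if max_results is not None and count >= max_results:
                break
            yield item
            count += 1

    assert list(capped_pager(range(10), max_results=3)) == [0, 1, 2]
    assert list(capped_pager(range(10), max_results=0)) == []
    assert list(capped_pager(range(3))) == [0, 1, 2]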
diff --git a/google/cloud/logging_v2/_http.py b/google/cloud/logging_v2/_http.py index 68bde346a..21fb38606 100644 --- a/google/cloud/logging_v2/_http.py +++ b/google/cloud/logging_v2/_http.py @@ -74,6 +74,7 @@ def list_entries( *, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): @@ -94,14 +95,16 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ extra_params = {"resourceNames": resource_names} @@ -131,7 +134,8 @@ def list_entries( ) # This method uses POST to make a read-only request. iterator._HTTP_METHOD = "POST" - return iterator + + return _entries_pager(iterator, max_results) def write_entries( self, @@ -219,7 +223,7 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_sinks(self, parent, *, page_size=None, page_token=None): + def list_sinks(self, parent, *, max_results=None, page_size=None, page_token=None): """List sinks for the parent resource. See @@ -234,14 +238,17 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): "organizations/[ORGANIZATION_ID]" "billingAccounts/[BILLING_ACCOUNT_ID]" "folders/[FOLDER_ID]". - page_size (Optional[int]): Maximum number of sinks to return, If not passed, - defaults to a value set by the API. - page_token (Optional[str]): Opaque marker for the next "page" of sinks. If not - passed, the API will return the first page of - sinks. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.Sink] + Generator[~logging_v2.Sink] """ extra_params = {} @@ -249,7 +256,7 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/{parent}/sinks" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -259,6 +266,8 @@ def list_sinks(self, parent, *, page_size=None, page_token=None): extra_params=extra_params, ) + return _entries_pager(iterator, max_results) + def sink_create( self, parent, sink_name, filter_, destination, *, unique_writer_identity=False ): @@ -373,24 +382,27 @@ def __init__(self, client): self._client = client self.api_request = client._connection.api_request - def list_metrics(self, project, *, page_size=None, page_token=None): + def list_metrics( + self, project, *, max_results=None, page_size=None, page_token=None + ): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[google.cloud.logging_v2.metric.Metric] + Generator[logging_v2.Metric] + """ extra_params = {} @@ -398,7 +410,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): extra_params["pageSize"] = page_size path = f"/projects/{project}/metrics" - return page_iterator.HTTPIterator( + iterator = page_iterator.HTTPIterator( client=self._client, api_request=self._client._connection.api_request, path=path, @@ -407,6 +419,7 @@ def list_metrics(self, project, *, page_size=None, page_token=None): page_token=page_token, extra_params=extra_params, ) + return _entries_pager(iterator, max_results) def metric_create(self, project, metric_name, filter_, description): """Create a metric resource. @@ -469,6 +482,18 @@ def metric_delete(self, project, metric_name): self.api_request(method="DELETE", path=target) +def _entries_pager(page_iter, max_results=None): + if max_results is not None and max_results < 0: + raise ValueError("max_results must be positive") + + i = 0 + for page in page_iter: + if max_results is not None and i >= max_results: + break + yield page + i += 1 + + def _item_to_entry(iterator, resource, loggers): """Convert a log entry resource to the native object. 
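# Caller-facing effect of the change above (sketch only; the `client` and
# `parent` names are assumed to exist, and the method shown is the HTTP sinks
# API this hunk modifies).
#
# Before this patch the method returned a page_iterator.HTTPIterator:
#     iterator = client.sinks_api.list_sinks(parent)
#     page = next(iterator.pages)          # explicit page access
#     token = iterator.next_page_token     # manual pagination token
#
# After, it returns a plain generator: pages are fetched internally, items
# arrive one at a time, and max_results caps the total across pages:
#     for sink in client.sinks_api.list_sinks(parent, max_results=10):
#         print(sink.name)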
diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index 5792ff6f9..7098c8baa 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -204,10 +204,11 @@ def list_entries( resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entry resources. + """Return a generator of log entry resources. Args: resource_names (Sequence[str]): Names of one or more parent resources @@ -226,14 +227,17 @@ def list_entries( https://cloud.google.com/logging/docs/view/advanced_filters order_by (str) One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (int): maximum number of entries to return, If not passed, - defaults to a value set by the API. - page_token (str): opaque marker for the next "page" of entries. If not - passed, the API will return the first page of - entries. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: resource_names = [f"projects/{self.project}"] @@ -243,6 +247,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) @@ -266,7 +271,9 @@ def sink(self, name, *, filter_=None, destination=None): """ return Sink(name, filter_=filter_, destination=destination, client=self) - def list_sinks(self, *, parent=None, page_size=None, page_token=None): + def list_sinks( + self, *, parent=None, max_results=None, page_size=None, page_token=None + ): """List sinks for the a parent resource. See @@ -283,22 +290,25 @@ def list_sinks(self, *, parent=None, page_size=None, page_token=None): "folders/[FOLDER_ID]". If not passed, defaults to the project bound to the API's client. - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. 
Returns: - Iterator[~logging_v2.sink.Sink] + Generator[~logging_v2.Sink] """ if parent is None: parent = f"projects/{self.project}" return self.sinks_api.list_sinks( - parent=parent, page_size=page_size, page_token=page_token + parent=parent, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def metric(self, name, *, filter_=None, description=""): @@ -319,27 +329,30 @@ def metric(self, name, *, filter_=None, description=""): """ return Metric(name, filter_=filter_, client=self, description=description) - def list_metrics(self, *, page_size=None, page_token=None): + def list_metrics(self, *, max_results=None, page_size=None, page_token=None): """List metrics for the project associated with this client. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/projects.metrics/list Args: - page_size (Optional[int]): The maximum number of sinks in each - page of results from this request. Non-positive values are ignored. Defaults to a - sensible value set by the API. - page_token (Optional[str]): If present, return the next batch of sinks, using the - value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property ofthe returned iterator instead of manually passing the - token. + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.metric.Metric] + Generator[logging_v2.Metric] """ return self.metrics_api.list_metrics( - self.project, page_size=page_size, page_token=page_token + self.project, + max_results=max_results, + page_size=page_size, + page_token=page_token, ) def get_default_handler(self, **kw): diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index 01221fc7b..404871bef 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -264,10 +264,11 @@ def list_entries( resource_names=None, filter_=None, order_by=None, + max_results=None, page_size=None, page_token=None, ): - """Return a page of log entries. + """Return a generator of log entry resources. See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -289,19 +290,16 @@ def list_entries( By default, a 24 hour filter is applied. order_by (Optional[str]): One of :data:`~logging_v2.ASCENDING` or :data:`~logging_v2.DESCENDING`. - page_size (Optional[int]): - Optional. The maximum number of entries in each page of results - from this request. Non-positive values are ignored. Defaults - to a sensible value set by the API. - page_token (Optional[str]): - Optional. If present, return the next batch of entries, using - the value, which must correspond to the ``nextPageToken`` value - returned in the previous response. Deprecated: use the ``pages`` - property of the returned iterator instead of manually passing - the token. - + max_results (Optional[int]): + Optional. The maximum number of entries to return. + Non-positive values are treated as 0. If None, uses API defaults. + page_size (int): number of entries to fetch in each API call. 
Although + requests are paged internally, logs are returned by the generator + one at a time. If not passed, defaults to a value set by the API. + page_token (str): opaque marker for the starting "page" of entries. If not + passed, the API will return the first page of entries. Returns: - Iterator[~logging_v2.entries.LogEntry] + Generator[~logging_v2.LogEntry] """ if resource_names is None: @@ -317,6 +315,7 @@ def list_entries( resource_names=resource_names, filter_=filter_, order_by=order_by, + max_results=max_results, page_size=page_size, page_token=page_token, ) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 836339f0b..d7e1e57d2 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -36,6 +36,7 @@ from google.cloud.logging_v2.handlers.transports import SyncTransport from google.cloud.logging_v2 import client from google.cloud.logging_v2.resource import Resource +from google.cloud.logging_v2.entries import TextEntry from google.protobuf.struct_pb2 import Struct, Value, ListValue, NullValue @@ -97,11 +98,13 @@ class Config(object): """ CLIENT = None + HTTP_CLIENT = None use_mtls = os.environ.get("GOOGLE_API_USE_MTLS_ENDPOINT", "never") def setUpModule(): Config.CLIENT = client.Client() + Config.HTTP_CLIENT = client.Client(_use_grpc=False) # Skip the test cases using bigquery, storage and pubsub clients for mTLS testing. @@ -186,34 +189,34 @@ def test_list_entry_with_auditlog(self): audit_dict = { "@type": type_url, "methodName": "test", - "requestMetadata": {"callerIp": "::1", "callerSuppliedUserAgent": "test"}, "resourceName": "test", "serviceName": "test", - "status": {"code": 0}, } audit_struct = self._dict_to_struct(audit_dict) - logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") - logger.log_proto(audit_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.payload_json["methodName"], audit_dict["methodName"] - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["methodName"], - audit_dict["methodName"], - ) + gapic_logger = Config.CLIENT.logger(f"audit-proto-{uuid.uuid1()}") + http_logger = Config.HTTP_CLIENT.logger(f"audit-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(audit_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.payload_json["methodName"], audit_dict["methodName"] + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["methodName"], + audit_dict["methodName"], + ) def test_list_entry_with_requestlog(self): """ @@ -244,20 +247,22 @@ def test_list_entry_with_requestlog(self): } req_struct = 
self._dict_to_struct(req_dict) - logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") - logger.log_proto(req_struct) - - # retrieve log - retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) - protobuf_entry = retry(lambda: next(logger.list_entries()))() - - self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) - self.assertIsNone(protobuf_entry.payload_pb) - self.assertIsInstance(protobuf_entry.payload_json, dict) - self.assertEqual(protobuf_entry.payload_json["@type"], type_url) - self.assertEqual( - protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url - ) + gapic_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}") + http_logger = Config.CLIENT.logger(f"req-proto-{uuid.uuid1()}-http") + for logger in [gapic_logger, http_logger]: + logger.log_proto(req_struct) + + # retrieve log + retry = RetryErrors((TooManyRequests, StopIteration), max_tries=8) + protobuf_entry = retry(lambda: next(logger.list_entries()))() + + self.assertIsInstance(protobuf_entry, entries.ProtobufEntry) + self.assertIsNone(protobuf_entry.payload_pb) + self.assertIsInstance(protobuf_entry.payload_json, dict) + self.assertEqual(protobuf_entry.payload_json["@type"], type_url) + self.assertEqual( + protobuf_entry.to_api_repr()["protoPayload"]["@type"], type_url + ) def test_list_entry_with_auditdata(self): """ @@ -294,46 +299,51 @@ def test_list_entry_with_auditdata(self): def test_log_text(self): TEXT_PAYLOAD = "System test: test_log_text" - logger = Config.CLIENT.logger(self._logger_name("log_text")) - self.to_delete.append(logger) - logger.log_text(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + self.assertTrue(isinstance(entries[0], TextEntry)) def test_log_text_with_timestamp(self): text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_ts")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_ts_http")) now = datetime.utcnow() - - self.to_delete.append(logger) - - logger.log_text(text_payload, timestamp=now) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) - self.assertIsInstance(entries[0].received_timestamp, datetime) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log_text(text_payload, timestamp=now) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + self.assertEqual(entries[0].timestamp, now.replace(tzinfo=UTC)) + self.assertIsInstance(entries[0].received_timestamp, datetime) def test_log_text_with_resource(self): text_payload = "System test: test_log_text_with_timestamp" - logger = Config.CLIENT.logger(self._logger_name("log_text_res")) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_res")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_res_http")) now = 
datetime.utcnow() - resource = Resource( - type="gae_app", - labels={"module_id": "default", "version_id": "test", "zone": ""}, - ) + for logger in [gapic_logger, http_logger]: + resource = Resource( + type="gae_app", + labels={"module_id": "default", "version_id": "test", "zone": ""}, + ) - self.to_delete.append(logger) + self.to_delete.append(logger) - logger.log_text(text_payload, timestamp=now, resource=resource) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, text_payload) - # project_id is output only so we don't want it in assertion - del entries[0].resource.labels["project_id"] - self.assertEqual(entries[0].resource, resource) + logger.log_text(text_payload, timestamp=now, resource=resource) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, text_payload) + # project_id is output only so we don't want it in assertion + del entries[0].resource.labels["project_id"] + self.assertEqual(entries[0].resource, resource) def test_log_text_w_metadata(self): TEXT_PAYLOAD = "System test: test_log_text" @@ -343,35 +353,42 @@ def test_log_text_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = {"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_text_md")) - self.to_delete.append(logger) - - logger.log_text( - TEXT_PAYLOAD, insert_id=INSERT_ID, severity=SEVERITY, http_request=REQUEST - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_text_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_text_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_text( + TEXT_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) + self.assertEqual(len(entries), 1) - entry = entries[0] - self.assertEqual(entry.payload, TEXT_PAYLOAD) - self.assertEqual(entry.insert_id, INSERT_ID) - self.assertEqual(entry.severity, SEVERITY) + entry = entries[0] + self.assertEqual(entry.payload, TEXT_PAYLOAD) + self.assertEqual(entry.insert_id, INSERT_ID) + self.assertEqual(entry.severity, SEVERITY) - request = entry.http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + request = entry.http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log_struct(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log_struct(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_struct_w_metadata(self): INSERT_ID = "INSERTID" @@ -380,54 +397,63 @@ def test_log_struct_w_metadata(self): URI = "https://api.example.com/endpoint" STATUS = 500 REQUEST = 
{"requestMethod": METHOD, "requestUrl": URI, "status": STATUS} - logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) - self.to_delete.append(logger) - - logger.log_struct( - self.JSON_PAYLOAD, - insert_id=INSERT_ID, - severity=SEVERITY, - http_request=REQUEST, - ) - entries = _list_entries(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_struct_md")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_struct_md_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + + logger.log_struct( + self.JSON_PAYLOAD, + insert_id=INSERT_ID, + severity=SEVERITY, + http_request=REQUEST, + ) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) - self.assertEqual(entries[0].insert_id, INSERT_ID) - self.assertEqual(entries[0].severity, SEVERITY) - request = entries[0].http_request - self.assertEqual(request["requestMethod"], METHOD) - self.assertEqual(request["requestUrl"], URI) - self.assertEqual(request["status"], STATUS) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(entries[0].insert_id, INSERT_ID) + self.assertEqual(entries[0].severity, SEVERITY) + request = entries[0].http_request + self.assertEqual(request["requestMethod"], METHOD) + self.assertEqual(request["requestUrl"], URI) + self.assertEqual(request["status"], STATUS) def test_log_w_text(self): TEXT_PAYLOAD = "System test: test_log_w_text" - logger = Config.CLIENT.logger(self._logger_name("log_w_text")) - self.to_delete.append(logger) - logger.log(TEXT_PAYLOAD) - entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, TEXT_PAYLOAD) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_text")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_text")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) + logger.log(TEXT_PAYLOAD) + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, TEXT_PAYLOAD) def test_log_w_struct(self): - logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_w_struct_http")) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - logger.log(self.JSON_PAYLOAD) - entries = _list_entries(logger) + logger.log(self.JSON_PAYLOAD) + entries = _list_entries(logger) - self.assertEqual(len(entries), 1) - self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, self.JSON_PAYLOAD) def test_log_empty(self): - logger = Config.CLIENT.logger(self._logger_name("log_empty")) - self.to_delete.append(logger) + gapic_logger = Config.CLIENT.logger(self._logger_name("log_empty")) + http_logger = Config.HTTP_CLIENT.logger(self._logger_name("log_empty_http")) - logger.log() - entries = _list_entries(logger) + for logger in [gapic_logger, http_logger]: + self.to_delete.append(logger) - self.assertEqual(len(entries), 1) - self.assertIsNone(entries[0].payload) + logger.log() + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertIsNone(entries[0].payload) def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -755,6 +781,51 @@ def test_update_sink(self): 
self.assertEqual(sink.filter_, UPDATED_FILTER) self.assertEqual(sink.destination, dataset_uri) + def test_api_equality_list_logs(self): + unique_id = uuid.uuid1() + gapic_logger = Config.CLIENT.logger(f"api-list-{unique_id}") + http_logger = Config.HTTP_CLIENT.logger(f"api-list-{unique_id}") + # write logs + log_count = 5 + for i in range(log_count): + gapic_logger.log_text(f"test {i}") + + def retryable(): + max_results = 3 + gapic_generator = gapic_logger.list_entries(max_results=max_results) + http_generator = http_logger.list_entries(max_results=max_results) + # returned objects should be consistent + self.assertEqual(type(gapic_generator), type(http_generator)) + gapic_list, http_list = list(gapic_generator), list(http_generator) + # max_results should limit the number of logs returned + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # returned logs should be the same + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # should return in ascending order + self.assertEqual(gapic_list[0].payload, "test 0") + # test reverse ordering + gapic_generator = gapic_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + http_generator = http_logger.list_entries( + max_results=max_results, order_by=google.cloud.logging_v2.DESCENDING + ) + gapic_list, http_list = list(gapic_generator), list(http_generator) + self.assertEqual(len(gapic_list), max_results) + self.assertEqual(len(http_list), max_results) + # http and gapic results should be consistent + self.assertEqual(gapic_list[0].insert_id, http_list[0].insert_id) + # returned logs should be in descending order + self.assertEqual(gapic_list[0].payload, f"test {log_count-1}") + + RetryErrors( + (ServiceUnavailable, InternalServerError, AssertionError), + delay=2, + backoff=2, + max_tries=3, + )(retryable)() + class _DeleteWrapper(object): def __init__(self, publisher, topic_path): diff --git a/tests/unit/test__gapic.py b/tests/unit/test__gapic.py index 5da1c7122..d8c4bf57e 100644 --- a/tests/unit/test__gapic.py +++ b/tests/unit/test__gapic.py @@ -32,7 +32,7 @@ FILTER = "logName:syslog AND severity>=ERROR" -class Test_LoggingAPI(object): +class Test_LoggingAPI(unittest.TestCase): LOG_NAME = "log_name" LOG_PATH = f"projects/{PROJECT}/logs/{LOG_NAME}" @@ -107,6 +107,49 @@ def test_list_entries_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_logs_with_max_results(self): + client = self.make_logging_api() + log_entry_msg = LogEntryPB(log_name=self.LOG_PATH, text_payload="text") + + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse( + entries=[log_entry_msg, log_entry_msg] + ) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + max_results=1, + ) + + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_logs_negative_max_results(self): + client = self.make_logging_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_entries), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogEntriesResponse(entries=[]) + result = client.list_entries( + [PROJECT_PATH], + filter_=FILTER, + order_by=google.cloud.logging.ASCENDING, + page_size=42, + page_token="token", + 
max_results=-1, + ) + # Check the request + list(result) + call.assert_called_once() + def test_write_entries_single(self): client = self.make_logging_api() @@ -141,7 +184,7 @@ def test_logger_delete(self): assert call.call_args.args[0].log_name == self.LOG_PATH -class Test_SinksAPI(object): +class Test_SinksAPI(unittest.TestCase): SINK_NAME = "sink_name" PARENT_PATH = f"projects/{PROJECT}" SINK_PATH = f"projects/{PROJECT}/sinks/{SINK_NAME}" @@ -208,6 +251,40 @@ def test_list_sinks_with_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_sinks_with_max_results(self): + client = self.make_sinks_api() + sink_msg = LogSink( + name=self.SINK_NAME, destination=self.DESTINATION_URI, filter=FILTER + ) + + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse( + sinks=[sink_msg, sink_msg] + ) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_sinks_negative_max_results(self): + client = self.make_sinks_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_sinks), "__call__" + ) as call: + call.return_value = logging_v2.types.ListSinksResponse(sinks=[]) + result = client.list_sinks( + self.PARENT_PATH, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_sink_create(self): client = self.make_sinks_api() with mock.patch.object( @@ -315,7 +392,7 @@ def test_sink_delete(self): assert request.sink_name == self.SINK_PATH -class Test_MetricsAPI(object): +class Test_MetricsAPI(unittest.TestCase): METRIC_NAME = "metric_name" METRIC_PATH = f"projects/{PROJECT}/metrics/{METRIC_NAME}" DESCRIPTION = "Description" @@ -379,6 +456,39 @@ def test_list_metrics_options(self): assert request.page_size == 42 assert request.page_token == "token" + def test_list_metrics_with_max_results(self): + client = self.make_metrics_api() + metric = logging_v2.types.LogMetric( + name=self.METRIC_PATH, description=self.DESCRIPTION, filter=FILTER + ) + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse( + metrics=[metric, metric] + ) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=1 + ) + # Check the request + call.assert_called_once() + assert len(list(result)) == 1 + + def test_list_metrics_negative_max_results(self): + client = self.make_metrics_api() + + with self.assertRaises(ValueError): + with mock.patch.object( + type(client._gapic_api.transport.list_log_metrics), "__call__" + ) as call: + call.return_value = logging_v2.types.ListLogMetricsResponse(metrics=[]) + result = client.list_metrics( + PROJECT, page_size=42, page_token="token", max_results=-1 + ) + # Check the request + list(result) + call.assert_called_once() + def test_metric_create(self): client = self.make_metrics_api() diff --git a/tests/unit/test__http.py b/tests/unit/test__http.py index e927f6c15..2154b6f57 100644 --- a/tests/unit/test__http.py +++ b/tests/unit/test__http.py @@ -129,16 +129,20 @@ def _make_timestamp(): NOW = datetime.datetime.utcnow().replace(tzinfo=UTC) return NOW, _datetime_to_rfc3339_w_nanos(NOW) - def test_list_entries_no_paging(self): + def 
test_list_entries_with_limits(self): from google.cloud.logging import Client from google.cloud.logging import TextEntry from google.cloud.logging import Logger NOW, TIMESTAMP = self._make_timestamp() IID = "IID" + IID1 = "IID1" + IID2 = "IID2" TEXT = "TEXT" SENT = {"resourceNames": [self.PROJECT_PATH]} - TOKEN = "TOKEN" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" RETURNED = { "entries": [ { @@ -147,24 +151,42 @@ def test_list_entries_no_paging(self): "resource": {"type": "global"}, "timestamp": TIMESTAMP, "logName": f"projects/{self.PROJECT}/logs/{self.LOGGER_NAME}", - } + }, + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "timestamp": TIMESTAMP, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ], - "nextPageToken": TOKEN, } client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) + # try with negative max_results + with self.assertRaises(ValueError): + client._connection = _Connection(RETURNED) + api = self._make_one(client) + empty = list(api.list_entries([self.PROJECT_PATH], max_results=-1)) + # try with max_results of 0 client._connection = _Connection(RETURNED) api = self._make_one(client) - - iterator = api.list_entries([self.PROJECT_PATH]) - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the entries returned. + empty = list(api.list_entries([self.PROJECT_PATH], max_results=0)) + self.assertEqual(empty, []) + # try with single result + client._connection = _Connection(RETURNED) + api = self._make_one(client) + iterator = api.list_entries([self.PROJECT_PATH], max_results=1) + entries = list(iterator) + # check the entries returned. self.assertEqual(len(entries), 1) entry = entries[0] self.assertIsInstance(entry, TextEntry) @@ -183,7 +205,7 @@ def test_list_entries_no_paging(self): called_with, {"method": "POST", "path": expected_path, "data": SENT} ) - def test_list_entries_w_paging(self): + def test_list_entries(self): from google.cloud.logging import DESCENDING from google.cloud.logging import Client from google.cloud.logging import Logger @@ -241,11 +263,8 @@ def test_list_entries_w_paging(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - # First check the token. - self.assertIsNone(token) - # Then check the entries returned. + # Check the entries returned. 
self.assertEqual(len(entries), 2) entry1 = entries[0] self.assertIsInstance(entry1, StructEntry) @@ -361,32 +380,38 @@ def test_ctor(self): self.assertIs(api._client, client) self.assertEqual(api.api_request, connection.api_request) - def test_list_sinks_no_paging(self): + def test_list_sinks_max_returned(self): from google.cloud.logging import Sink - TOKEN = "TOKEN" RETURNED = { "sinks": [ { "name": self.SINK_PATH, "filter": self.FILTER, "destination": self.DESTINATION_URI, - } + }, + {"name": "test", "filter": "test", "destination": "test"}, ], - "nextPageToken": TOKEN, } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=-1)) + # try with max_results of 0 conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - - iterator = api.list_sinks(self.PROJECT_PATH) - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + empty = list(api.list_sinks(self.PROJECT_PATH, max_results=0)) + self.assertEqual(empty, []) + # try with single result + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + iterator = api.list_sinks(self.PROJECT_PATH, max_results=1) + sinks = list(iterator) + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -401,7 +426,7 @@ def test_list_sinks_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_sinks_w_paging(self): + def test_list_sinks(self): from google.cloud.logging import Sink TOKEN = "TOKEN" @@ -423,11 +448,7 @@ def test_list_sinks_w_paging(self): self.PROJECT_PATH, page_size=PAGE_SIZE, page_token=TOKEN ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -632,26 +653,35 @@ def _get_target_class(): def _make_one(self, *args, **kw): return self._get_target_class()(*args, **kw) - def test_list_metrics_no_paging(self): + def test_list_metrics_max_results(self): from google.cloud.logging import Metric - TOKEN = "TOKEN" RETURNED = { - "metrics": [{"name": self.METRIC_PATH, "filter": self.FILTER}], - "nextPageToken": TOKEN, + "metrics": [ + {"name": self.METRIC_PATH, "filter": self.FILTER}, + {"name": "test", "filter": "test"}, + ], } + # try with negative max_results + with self.assertRaises(ValueError): + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=-1)) + # try with max_results of 0 + conn = _Connection(RETURNED) + client = _Client(conn) + api = self._make_one(client) + empty = list(api.list_metrics(self.PROJECT, max_results=0)) + self.assertEqual(empty, []) + # try with single result conn = _Connection(RETURNED) client = _Client(conn) api = self._make_one(client) - iterator = api.list_metrics(self.PROJECT) - page = next(iterator.pages) - metrics = list(page) - token = iterator.next_page_token - - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the metrics returned. + iterator = api.list_metrics(self.PROJECT, max_results=1) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) @@ -666,7 +696,7 @@ def test_list_metrics_no_paging(self): called_with, {"method": "GET", "path": path, "query_params": {}} ) - def test_list_metrics_w_paging(self): + def test_list_metrics(self): from google.cloud.logging import Metric TOKEN = "TOKEN" @@ -678,11 +708,7 @@ def test_list_metrics_w_paging(self): iterator = api.list_metrics(self.PROJECT, page_size=PAGE_SIZE, page_token=TOKEN) metrics = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the metrics returned. + # Check the metrics returned. self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py index 46526fb21..1a31e9c0c 100644 --- a/tests/unit/test_client.py +++ b/tests/unit/test_client.py @@ -259,7 +259,6 @@ def test_list_entries_defaults(self): IID = "IID" TEXT = "TEXT" - TOKEN = "TOKEN" ENTRIES = [ { "textPayload": TEXT, @@ -272,13 +271,11 @@ def test_list_entries_defaults(self): client = self._make_one( project=self.PROJECT, credentials=creds, _use_grpc=False ) - returned = {"entries": ENTRIES, "nextPageToken": TOKEN} + returned = {"entries": ENTRIES} client._connection = _Connection(returned) iterator = client.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 1) entry = entries[0] @@ -289,7 +286,6 @@ def test_list_entries_defaults(self): self.assertEqual(logger.name, self.LOGGER_NAME) self.assertIs(logger.client, client) self.assertEqual(logger.project, self.PROJECT) - self.assertEqual(token, TOKEN) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -342,6 +338,12 @@ def test_list_entries_explicit(self): "resource": {"type": "global"}, "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -355,13 +357,10 @@ def test_list_entries_explicit(self): order_by=DESCENDING, page_size=PAGE_SIZE, page_token=TOKEN, + max_results=2, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -423,7 +422,6 @@ def test_list_entries_explicit_timestamp(self): PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} PROTO_PAYLOAD = PAYLOAD.copy() PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" - TOKEN = "TOKEN" PAGE_SIZE = 42 ENTRIES = [ { @@ -450,14 +448,9 @@ def test_list_entries_explicit_timestamp(self): filter_=INPUT_FILTER, order_by=DESCENDING, page_size=PAGE_SIZE, - page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token - - # First, check the token. - self.assertIsNone(token) - # Then check the entries. + # Check the entries. 
self.assertEqual(len(entries), 2) entry = entries[0] self.assertIsInstance(entry, StructEntry) @@ -491,7 +484,6 @@ def test_list_entries_explicit_timestamp(self): "filter": INPUT_FILTER, "orderBy": DESCENDING, "pageSize": PAGE_SIZE, - "pageToken": TOKEN, "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], }, }, @@ -529,7 +521,6 @@ def test_list_sinks_no_paging(self): from google.cloud.logging import Sink PROJECT = "PROJECT" - TOKEN = "TOKEN" SINK_NAME = "sink_name" FILTER = "logName:syslog AND severity>=ERROR" SINKS = [ @@ -538,17 +529,13 @@ def test_list_sinks_no_paging(self): client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"sinks": SINKS, "nextPageToken": TOKEN} + returned = {"sinks": SINKS} client._connection = _Connection(returned) iterator = client.list_sinks() - page = next(iterator.pages) - sinks = list(page) - token = iterator.next_page_token + sinks = list(iterator) - # First check the token. - self.assertEqual(token, TOKEN) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -573,7 +560,8 @@ def test_list_sinks_with_paging(self): TOKEN = "TOKEN" PAGE_SIZE = 42 SINKS = [ - {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI} + {"name": SINK_NAME, "filter": FILTER, "destination": self.DESTINATION_URI}, + {"name": "test", "filter": "test", "destination": "test"}, ] client = self._make_one( project=PROJECT, credentials=_make_credentials(), _use_grpc=False @@ -581,13 +569,11 @@ def test_list_sinks_with_paging(self): returned = {"sinks": SINKS} client._connection = _Connection(returned) - iterator = client.list_sinks(page_size=PAGE_SIZE, page_token=TOKEN) + iterator = client.list_sinks( + page_size=PAGE_SIZE, page_token=TOKEN, max_results=1 + ) sinks = list(iterator) - token = iterator.next_page_token - - # First check the token. - self.assertIsNone(token) - # Then check the sinks returned. + # Check the sinks returned. self.assertEqual(len(sinks), 1) sink = sinks[0] self.assertIsInstance(sink, Sink) @@ -678,29 +664,27 @@ def test_list_metrics_with_paging(self): from google.cloud.logging import Metric token = "TOKEN" - next_token = "T00KEN" page_size = 42 metrics = [ { "name": self.METRIC_NAME, "filter": self.FILTER, "description": self.DESCRIPTION, - } + }, + {"name": "test", "filter": "test", "description": "test"}, ] client = self._make_one( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"metrics": metrics, "nextPageToken": next_token} + returned = {"metrics": metrics} client._connection = _Connection(returned) # Execute request. - iterator = client.list_metrics(page_size=page_size, page_token=token) - page = next(iterator.pages) - metrics = list(page) - - # First check the token. - self.assertEqual(iterator.next_page_token, next_token) - # Then check the metrics returned. + iterator = client.list_metrics( + page_size=page_size, page_token=token, max_results=1 + ) + metrics = list(iterator) + # Check the metrics returned. 
self.assertEqual(len(metrics), 1) metric = metrics[0] self.assertIsInstance(metric, Metric) diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index 0d8fd1208..ef13c923c 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -605,23 +605,18 @@ def test_delete_w_alternate_client(self): def test_list_entries_defaults(self): from google.cloud.logging import Client - TOKEN = "TOKEN" - client = Client( project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False ) - returned = {"nextPageToken": TOKEN} + returned = {} client._connection = _Connection(returned) logger = self._make_one(self.LOGGER_NAME, client=client) iterator = logger.list_entries() - page = next(iterator.pages) - entries = list(page) - token = iterator.next_page_token + entries = list(iterator) self.assertEqual(len(entries), 0) - self.assertEqual(token, TOKEN) LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) # check call payload @@ -668,10 +663,8 @@ def test_list_entries_explicit(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload call_payload_no_filter = deepcopy(client._connection._called_with) @@ -728,10 +721,8 @@ def test_list_entries_explicit_timestamp(self): page_token=TOKEN, ) entries = list(iterator) - token = iterator.next_page_token self.assertEqual(len(entries), 0) - self.assertIsNone(token) # self.assertEqual(client._listed, LISTED) # check call payload LOG_FILTER = "logName=projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME,) @@ -751,6 +742,100 @@ def test_list_entries_explicit_timestamp(self): }, ) + def test_list_entries_limit(self): + from google.cloud.logging import DESCENDING + from google.cloud.logging import ProtobufEntry + from google.cloud.logging import StructEntry + from google.cloud.logging import Logger + from google.cloud.logging import Client + + PROJECT1 = "PROJECT1" + PROJECT2 = "PROJECT2" + INPUT_FILTER = "logName:LOGNAME" + IID1 = "IID1" + IID2 = "IID2" + PAYLOAD = {"message": "MESSAGE", "weather": "partly cloudy"} + PROTO_PAYLOAD = PAYLOAD.copy() + PROTO_PAYLOAD["@type"] = "type.googleapis.com/testing.example" + TOKEN = "TOKEN" + PAGE_SIZE = 42 + ENTRIES = [ + { + "jsonPayload": PAYLOAD, + "insertId": IID1, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": PROTO_PAYLOAD, + "insertId": IID2, + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + { + "protoPayload": "ignored", + "insertId": "ignored", + "resource": {"type": "global"}, + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + }, + ] + client = Client( + project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False + ) + returned = {"entries": ENTRIES} + client._connection = _Connection(returned) + logger = self._make_one(self.LOGGER_NAME, client=client) + + iterator = logger.list_entries( + resource_names=[f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + filter_=INPUT_FILTER, + order_by=DESCENDING, + page_size=PAGE_SIZE, + page_token=TOKEN, + max_results=2, + ) + entries = list(iterator) + # Check the entries. 
+ self.assertEqual(len(entries), 2) + entry = entries[0] + self.assertIsInstance(entry, StructEntry) + self.assertEqual(entry.insert_id, IID1) + self.assertEqual(entry.payload, PAYLOAD) + logger = entry.logger + self.assertIsInstance(logger, Logger) + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + entry = entries[1] + self.assertIsInstance(entry, ProtobufEntry) + self.assertEqual(entry.insert_id, IID2) + self.assertEqual(entry.payload, PROTO_PAYLOAD) + logger = entry.logger + self.assertEqual(logger.name, self.LOGGER_NAME) + self.assertIs(logger.client, client) + self.assertEqual(logger.project, self.PROJECT) + + self.assertIs(entries[0].logger, entries[1].logger) + + # check call payload + call_payload_no_filter = deepcopy(client._connection._called_with) + call_payload_no_filter["data"]["filter"] = "removed" + self.assertEqual( + call_payload_no_filter, + { + "path": "/entries:list", + "method": "POST", + "data": { + "filter": "removed", + "orderBy": DESCENDING, + "pageSize": PAGE_SIZE, + "pageToken": TOKEN, + "resourceNames": [f"projects/{PROJECT1}", f"projects/{PROJECT2}"], + }, + }, + ) + class TestBatch(unittest.TestCase): From 818213e143d6a1941211a48e0b23069a426ac300 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Fri, 15 Oct 2021 12:59:57 -0700 Subject: [PATCH 27/34] feat: avoid importing grpc when explicitly disabled (#416) --- google/cloud/logging_v2/client.py | 20 +++++++++++++------- 1 file changed, 13 insertions(+), 7 deletions(-) diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index 7098c8baa..92ab72a3a 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -18,13 +18,6 @@ import os import sys -try: - from google.cloud.logging_v2 import _gapic -except ImportError: # pragma: NO COVER - _HAVE_GRPC = False - _gapic = None -else: - _HAVE_GRPC = True import google.api_core.client_options from google.cloud.client import ClientWithProject @@ -48,6 +41,19 @@ _DISABLE_GRPC = os.getenv(DISABLE_GRPC, False) +_HAVE_GRPC = False + +try: + if not _DISABLE_GRPC: + # only import if DISABLE_GRPC is not set + from google.cloud.logging_v2 import _gapic + + _HAVE_GRPC = True +except ImportError: # pragma: NO COVER + # could not import gapic library. Fall back to HTTP mode + _HAVE_GRPC = False + _gapic = None + _USE_GRPC = _HAVE_GRPC and not _DISABLE_GRPC _GAE_RESOURCE_TYPE = "gae_app" From 83d9ca8521fe7c470bb6755a48a97496515d7abc Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Thu, 11 Nov 2021 13:54:00 -0800 Subject: [PATCH 28/34] feat!: make logging API more friendly to use (#422) --- google/cloud/logging_v2/logger.py | 30 +++++++-- tests/system/test_system.py | 19 ++++++ tests/unit/test_logger.py | 101 ++++++++++++++++++++++++++++++ 3 files changed, 145 insertions(+), 5 deletions(-) diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index 404871bef..542e4d629 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -45,6 +45,8 @@ ("source_location", None), ) +_STRUCT_EXTRACTABLE_FIELDS = ["severity", "trace", "span_id"] + class Logger(object): """Loggers represent named targets for log entries. 
@@ -133,6 +135,20 @@ def _do_log(self, client, _entry_class, payload=None, **kw): kw["labels"] = kw.pop("labels", self.labels) kw["resource"] = kw.pop("resource", self.default_resource) + severity = kw.get("severity", None) + if isinstance(severity, str) and not severity.isupper(): + # convert severity to upper case, as expected by enum definition + kw["severity"] = severity.upper() + + if isinstance(kw["resource"], collections.abc.Mapping): + # if resource was passed as a dict, attempt to parse it into a + # Resource object + try: + kw["resource"] = Resource(**kw["resource"]) + except TypeError as e: + # dict couldn't be parsed as a Resource + raise TypeError("invalid resource dict") from e + if payload is not None: entry = _entry_class(payload=payload, **kw) else: @@ -186,6 +202,10 @@ def log_struct(self, info, *, client=None, **kw): kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ + for field in _STRUCT_EXTRACTABLE_FIELDS: + # attempt to copy relevant fields from the payload into the LogEntry body + if field in info and field not in kw: + kw[field] = info[field] self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): @@ -220,14 +240,14 @@ def log(self, message=None, *, client=None, **kw): kw (Optional[dict]): additional keyword arguments for the entry. See :class:`~logging_v2.entries.LogEntry`. """ - entry_type = LogEntry if isinstance(message, google.protobuf.message.Message): - entry_type = ProtobufEntry + self.log_proto(message, client=client, **kw) elif isinstance(message, collections.abc.Mapping): - entry_type = StructEntry + self.log_struct(message, client=client, **kw) elif isinstance(message, str): - entry_type = TextEntry - self._do_log(client, entry_type, message, **kw) + self.log_text(message, client=client, **kw) + else: + self._do_log(client, LogEntry, message, **kw) def delete(self, logger_name=None, *, client=None): """Delete all entries in a logger via a DELETE request diff --git a/tests/system/test_system.py b/tests/system/test_system.py index d7e1e57d2..cde722bd6 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -455,6 +455,25 @@ def test_log_empty(self): self.assertEqual(len(entries), 1) self.assertIsNone(entries[0].payload) + def test_log_struct_logentry_data(self): + logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) + self.to_delete.append(logger) + + JSON_PAYLOAD = { + "message": "System test: test_log_struct_logentry_data", + "severity": "warning", + "trace": "123", + "span_id": "456", + } + logger.log(JSON_PAYLOAD) + entries = _list_entries(logger) + + self.assertEqual(len(entries), 1) + self.assertEqual(entries[0].payload, JSON_PAYLOAD) + self.assertEqual(entries[0].severity, "WARNING") + self.assertEqual(entries[0].trace, JSON_PAYLOAD["trace"]) + self.assertEqual(entries[0].span_id, JSON_PAYLOAD["span_id"]) + def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py index ef13c923c..5f0868ba2 100644 --- a/tests/unit/test_logger.py +++ b/tests/unit/test_logger.py @@ -379,6 +379,107 @@ def test_log_struct_w_explicit(self): self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + def test_log_struct_inference(self): + """ + LogEntry fields in _STRUCT_EXTRACTABLE_FIELDS should be inferred from + the payload data if not passed as a parameter + """ + from google.cloud.logging_v2.handlers._monitored_resources import 
( + detect_resource, + ) + + STRUCT = { + "message": "System test: test_log_struct_logentry_data", + "severity": "warning", + "trace": "123", + "span_id": "456", + } + RESOURCE = detect_resource(self.PROJECT)._to_dict() + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "jsonPayload": STRUCT, + "severity": "WARNING", + "trace": "123", + "spanId": "456", + "resource": RESOURCE, + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log_struct(STRUCT, resource=RESOURCE) + + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_w_dict_resource(self): + """ + Users should be able to input a dictionary with type and labels instead + of a Resource object + """ + import pytest + + MESSAGE = "hello world" + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + broken_resource_dicts = [{}, {"type": ""}, {"labels": ""}] + for resource in broken_resource_dicts: + # ensure bad inputs result in a helpful error + with pytest.raises(TypeError): + logger.log(MESSAGE, resource=resource) + # ensure well-formed dict is converted to a resource + resource = {"type": "gae_app", "labels": []} + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": MESSAGE, + "resource": resource, + } + ] + logger.log(MESSAGE, resource=resource) + self.assertEqual(api._write_entries_called_with, (ENTRIES, None, None, None)) + + def test_log_lowercase_severity(self): + """ + lower case severity strings should be accepted + """ + from google.cloud.logging_v2.handlers._monitored_resources import ( + detect_resource, + ) + + for lower_severity in [ + "default", + "debug", + "info", + "notice", + "warning", + "error", + "critical", + "alert", + "emergency", + ]: + MESSAGE = "hello world" + RESOURCE = detect_resource(self.PROJECT)._to_dict() + ENTRIES = [ + { + "logName": "projects/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME), + "textPayload": MESSAGE, + "resource": RESOURCE, + "severity": lower_severity.upper(), + } + ] + client = _Client(self.PROJECT) + api = client.logging_api = _DummyLoggingAPI() + logger = self._make_one(self.LOGGER_NAME, client=client) + + logger.log(MESSAGE, severity=lower_severity) + + self.assertEqual( + api._write_entries_called_with, (ENTRIES, None, None, None) + ) + def test_log_proto_defaults(self): from google.cloud.logging_v2.handlers._monitored_resources import ( detect_resource, From a760e02371a55d6262e42de9e0222fffa2c7192b Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Tue, 7 Dec 2021 12:07:04 -0800 Subject: [PATCH 29/34] feat: add json_fields extras argument for adding to jsonPayload (#447) --- google/cloud/logging_v2/client.py | 2 +- google/cloud/logging_v2/handlers/handlers.py | 16 +++- tests/system/test_system.py | 25 ++++++ tests/unit/handlers/test_handlers.py | 90 ++++++++++++++++++++ tests/unit/handlers/test_structured_log.py | 23 +++++ 5 files changed, 153 insertions(+), 3 deletions(-) diff --git a/google/cloud/logging_v2/client.py b/google/cloud/logging_v2/client.py index 92ab72a3a..3d5ea24fc 100644 --- a/google/cloud/logging_v2/client.py +++ b/google/cloud/logging_v2/client.py @@ -376,7 +376,7 @@ def get_default_handler(self, **kw): if monitored_resource.type == _GAE_RESOURCE_TYPE: return CloudLoggingHandler(self, resource=monitored_resource, **kw) elif 
monitored_resource.type == _GKE_RESOURCE_TYPE: - return ContainerEngineHandler(**kw) + return StructuredLogHandler(**kw, project_id=self.project) elif monitored_resource.type == _GCF_RESOURCE_TYPE: # __stdout__ stream required to support structured logging on Python 3.7 kw["stream"] = kw.get("stream", sys.__stdout__) diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 8d14852e1..39bcbca79 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -221,9 +221,16 @@ def _format_and_parse_message(record, formatter_handler): record (logging.LogRecord): The record object representing the log formatter_handler (logging.Handler): The handler used to format the log """ - # if message is a dictionary, return as-is + passed_json_fields = getattr(record, "json_fields", {}) + # if message is a dictionary, use dictionary directly if isinstance(record.msg, collections.abc.Mapping): - return record.msg + payload = record.msg + # attach any extra json fields if present + if passed_json_fields and isinstance( + passed_json_fields, collections.abc.Mapping + ): + payload = {**payload, **passed_json_fields} + return payload # format message string based on superclass message = formatter_handler.format(record) try: @@ -235,6 +242,11 @@ def _format_and_parse_message(record, formatter_handler): except (json.decoder.JSONDecodeError, IndexError): # log string is not valid json pass + # if json_fields was set, create a dictionary using that + if passed_json_fields and isinstance(passed_json_fields, collections.abc.Mapping): + if message != "None": + passed_json_fields["message"] = message + return passed_json_fields # if formatted message contains no content, return None return message if message != "None" else None diff --git a/tests/system/test_system.py b/tests/system/test_system.py index cde722bd6..24050e8b3 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -551,6 +551,31 @@ def test_handlers_w_extras(self): ) self.assertEqual(entries[0].resource.type, extra["resource"].type) + def test_handlers_w_json_fields(self): + LOG_MESSAGE = "Testing with json_field extras." + LOGGER_NAME = "json_field_extras" + handler_name = self._logger_name(LOGGER_NAME) + + handler = CloudLoggingHandler( + Config.CLIENT, name=handler_name, transport=SyncTransport + ) + + # only create the logger to delete, hidden otherwise + logger = Config.CLIENT.logger(handler.name) + self.to_delete.append(logger) + + cloud_logger = logging.getLogger(LOGGER_NAME) + cloud_logger.addHandler(handler) + extra = {"json_fields": {"hello": "world", "two": 2}} + cloud_logger.warn(LOG_MESSAGE, extra=extra) + + entries = _list_entries(logger) + self.assertEqual(len(entries), 1) + payload = entries[0].payload + self.assertEqual(payload["message"], LOG_MESSAGE) + self.assertEqual(payload["hello"], "world") + self.assertEqual(payload["two"], 2) + def test_log_root_handler(self): LOG_MESSAGE = "It was the best of times." 
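For orientation, the `json_fields` extra exercised by the system test above can be used from application code roughly as follows. This is a minimal sketch, not part of the patch; it assumes application default credentials and the handler attached by `client.setup_logging()`:

```py
import logging

import google.cloud.logging

# Assumes default credentials; attaches a cloud logging handler to the root logger.
client = google.cloud.logging.Client()
client.setup_logging()

# Any mapping passed via the "json_fields" extra is merged into the entry's
# jsonPayload next to the formatted message, producing e.g.
# {"message": "Testing with json_field extras.", "hello": "world", "two": 2}
logging.warning(
    "Testing with json_field extras.",
    extra={"json_fields": {"hello": "world", "two": 2}},
)
```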
diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py
index d36dc8959..71a709b6a 100644
--- a/tests/unit/handlers/test_handlers.py
+++ b/tests/unit/handlers/test_handlers.py
@@ -447,6 +447,40 @@ def test_emit_dict(self):
             ),
         )
 
+    def test_emit_w_json_extras(self):
+        """
+        User can add json_fields to the record, which should populate the payload
+        """
+        from google.cloud.logging_v2.logger import _GLOBAL_RESOURCE
+
+        client = _Client(self.PROJECT)
+        handler = self._make_one(
+            client, transport=_Transport, resource=_GLOBAL_RESOURCE,
+        )
+        message = "message"
+        json_fields = {"hello": "world"}
+        logname = "logname"
+        expected_label = {"python_logger": logname}
+        record = logging.LogRecord(
+            logname, logging.INFO, None, None, message, None, None
+        )
+        setattr(record, "json_fields", json_fields)
+        handler.handle(record)
+
+        self.assertEqual(
+            handler.transport.send_called_with,
+            (
+                record,
+                {"message": "message", "hello": "world"},
+                _GLOBAL_RESOURCE,
+                expected_label,
+                None,
+                None,
+                None,
+                None,
+            ),
+        )
+
     def test_emit_with_encoded_json(self):
         """
         Handler should parse json encoded as a string
@@ -608,6 +642,62 @@ def test_broken_encoded_dict(self):
         result = _format_and_parse_message(record, handler)
         self.assertEqual(result, message)
 
+    def test_json_fields(self):
+        """
+        record.json_fields should populate the json payload
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = "hello"
+        json_fields = {"key": "val"}
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        setattr(record, "json_fields", json_fields)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, {"message": message, "key": "val"})
+
+    def test_empty_json_fields(self):
+        """
+        empty json_fields dictionaries should result in a string output
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = "hello"
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        setattr(record, "json_fields", {})
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, message)
+
+    def test_json_fields_empty_message(self):
+        """
+        empty message fields should not be added to json_fields dictionaries
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = None
+        json_fields = {"key": "val"}
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        setattr(record, "json_fields", json_fields)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result, json_fields)
+
+    def test_json_fields_with_json_message(self):
+        """
+        if json_fields and message are both dicts, they should be combined
+        """
+        from google.cloud.logging_v2.handlers.handlers import _format_and_parse_message
+
+        message = {"key_m": "val_m"}
+        json_fields = {"key_j": "val_j"}
+        record = logging.LogRecord("logname", None, None, None, message, None, None)
+        setattr(record, "json_fields", json_fields)
+        handler = logging.StreamHandler()
+        result = _format_and_parse_message(record, handler)
+        self.assertEqual(result["key_m"], message["key_m"])
+        self.assertEqual(result["key_j"], json_fields["key_j"])
+
 
 class TestSetupLogging(unittest.TestCase):
     def _call_fut(self, handler, excludes=None):
diff --git a/tests/unit/handlers/test_structured_log.py
b/tests/unit/handlers/test_structured_log.py
index d9dfa2512..08e4c2906 100644
--- a/tests/unit/handlers/test_structured_log.py
+++ b/tests/unit/handlers/test_structured_log.py
@@ -321,3 +321,26 @@ def test_format_overrides(self):
         result = json.loads(handler.format(record))
         for (key, value) in expected_payload.items():
             self.assertEqual(value, result[key])
+
+    def test_format_with_json_fields(self):
+        """
+        User can add json_fields to the record, which should populate the payload
+        """
+        import logging
+        import json
+
+        handler = self._make_one()
+        message = "name: %s"
+        name_arg = "Daniel"
+        expected_result = "name: Daniel"
+        json_fields = {"hello": "world", "number": 12}
+        record = logging.LogRecord(
+            None, logging.INFO, None, None, message, name_arg, None,
+        )
+        record.created = None
+        setattr(record, "json_fields", json_fields)
+        handler.filter(record)
+        result = json.loads(handler.format(record))
+        self.assertEqual(result["message"], expected_result)
+        self.assertEqual(result["hello"], "world")
+        self.assertEqual(result["number"], 12)

From 97e32b67603553fe350b6327455fc9f80b8aa6ce Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Tue, 7 Dec 2021 13:39:59 -0800
Subject: [PATCH 30/34] fix: allow reading logs from non-project paths (#444)

---
 google/cloud/logging_v2/entries.py |  17 ++++--
 tests/unit/test_entries.py         |  84 +++++++++++++++++++++++++++++-
 tests/unit/test_logger.py          |  30 +++++++++++
 3 files changed, 124 insertions(+), 7 deletions(-)

diff --git a/google/cloud/logging_v2/entries.py b/google/cloud/logging_v2/entries.py
index 0af5a46f7..cb485da61 100644
--- a/google/cloud/logging_v2/entries.py
+++ b/google/cloud/logging_v2/entries.py
@@ -46,11 +46,12 @@ )
 
 
-def logger_name_from_path(path):
+def logger_name_from_path(path, project=None):
     """Validate a logger URI path and get the logger name.
 
     Args:
         path (str): URI path for a logger API request
+        project (str): The project the path is expected to belong to
 
     Returns:
         str: Logger name parsed from ``path``.
 
     Raises:
         ValueError: If the ``path`` is ill-formed or if the project
             from ``path`` does not agree with the ``project`` passed in.
     """
-    return _name_from_project_path(path, None, _LOGGER_TEMPLATE)
+    return _name_from_project_path(path, project, _LOGGER_TEMPLATE)
 
 
 def _int_or_none(value):
@@ -155,7 +156,8 @@ def from_api_repr(cls, resource, client, *, loggers=None):
                 Client which holds credentials and project configuration.
             loggers (Optional[dict]):
                 A mapping of logger fullnames -> loggers.  If not
-                passed, the entry will have a newly-created logger.
+                passed, the entry will have a newly-created logger if possible,
+                or an empty logger field if not.
 
         Returns:
             google.cloud.logging.entries.LogEntry: Log entry parsed from ``resource``.
@@ -165,8 +167,13 @@ def from_api_repr(cls, resource, client, *, loggers=None):
         logger_fullname = resource["logName"]
         logger = loggers.get(logger_fullname)
         if logger is None:
-            logger_name = logger_name_from_path(logger_fullname)
-            logger = loggers[logger_fullname] = client.logger(logger_name)
+            # attempt to create a logger if possible
+            try:
+                logger_name = logger_name_from_path(logger_fullname, client.project)
+                logger = loggers[logger_fullname] = client.logger(logger_name)
+            except ValueError:
+                # log name is not scoped to a project.
Leave logger as None + pass payload = cls._extract_payload(resource) insert_id = resource.get("insertId") timestamp = resource.get("timestamp") diff --git a/tests/unit/test_entries.py b/tests/unit/test_entries.py index b8795b8ce..6f3af684f 100644 --- a/tests/unit/test_entries.py +++ b/tests/unit/test_entries.py @@ -18,10 +18,10 @@ class Test_logger_name_from_path(unittest.TestCase): - def _call_fut(self, path): + def _call_fut(self, path, project=None): from google.cloud.logging_v2.entries import logger_name_from_path - return logger_name_from_path(path) + return logger_name_from_path(path, project) def test_w_simple_name(self): LOGGER_NAME = "LOGGER_NAME" @@ -37,6 +37,30 @@ def test_w_name_w_all_extras(self): logger_name = self._call_fut(PATH) self.assertEqual(logger_name, LOGGER_NAME) + def test_w_wrong_project(self): + LOGGER_NAME = "LOGGER_NAME" + IN_PROJECT = "in-project" + PATH_PROJECT = "path-project" + PATH = "projects/%s/logs/%s" % (PATH_PROJECT, LOGGER_NAME) + with self.assertRaises(ValueError): + self._call_fut(PATH, IN_PROJECT) + + def test_invalid_inputs(self): + invalid_list = [ + "", + "abc/123/logs/456", + "projects//logs/", + "projects/123/logs", + "projects/123logs/", + "projects123/logs", + "project/123", + "projects123logs456", + "/logs/123", + ] + for path in invalid_list: + with self.assertRaises(ValueError): + self._call_fut(path) + class Test__int_or_none(unittest.TestCase): def _call_fut(self, value): @@ -315,6 +339,62 @@ def test_from_api_repr_w_loggers_w_logger_match(self): self.assertEqual(entry.operation, OPERATION) self.assertIsNone(entry.payload) + def test_from_api_repr_w_folder_path(self): + from datetime import datetime + from datetime import timedelta + from google.cloud._helpers import UTC + + client = _Client(self.PROJECT) + IID = "IID" + NOW = datetime.utcnow().replace(tzinfo=UTC) + LATER = NOW + timedelta(seconds=1) + TIMESTAMP = _datetime_to_rfc3339_w_nanos(NOW) + RECEIVED = _datetime_to_rfc3339_w_nanos(LATER) + LOG_NAME = "folders/%s/logs/%s" % (self.PROJECT, self.LOGGER_NAME) + LABELS = {"foo": "bar", "baz": "qux"} + TRACE = "12345678-1234-5678-1234-567812345678" + SPANID = "000000000000004a" + FILE = "my_file.py" + LINE_NO = 123 + FUNCTION = "my_function" + SOURCE_LOCATION = {"file": FILE, "line": str(LINE_NO), "function": FUNCTION} + OP_ID = "OP_ID" + PRODUCER = "PRODUCER" + OPERATION = {"id": OP_ID, "producer": PRODUCER, "first": True, "last": False} + API_REPR = { + "logName": LOG_NAME, + "insertId": IID, + "timestamp": TIMESTAMP, + "receiveTimestamp": RECEIVED, + "labels": LABELS, + "trace": TRACE, + "spanId": SPANID, + "traceSampled": True, + "sourceLocation": SOURCE_LOCATION, + "operation": OPERATION, + } + klass = self._get_target_class() + + entry = klass.from_api_repr(API_REPR, client) + + self.assertEqual(entry.log_name, LOG_NAME) + self.assertIsNone(entry.logger) + self.assertEqual(entry.insert_id, IID) + self.assertEqual(entry.timestamp, NOW) + self.assertEqual(entry.received_timestamp, LATER) + self.assertEqual(entry.labels, LABELS) + self.assertEqual(entry.trace, TRACE) + self.assertEqual(entry.span_id, SPANID) + self.assertTrue(entry.trace_sampled) + + source_location = entry.source_location + self.assertEqual(source_location["file"], FILE) + self.assertEqual(source_location["line"], LINE_NO) + self.assertEqual(source_location["function"], FUNCTION) + + self.assertEqual(entry.operation, OPERATION) + self.assertIsNone(entry.payload) + def test_to_api_repr_w_source_location_no_line(self): from google.cloud.logging_v2.logger import 
_GLOBAL_RESOURCE
diff --git a/tests/unit/test_logger.py b/tests/unit/test_logger.py
index 5f0868ba2..1eae1cda6 100644
--- a/tests/unit/test_logger.py
+++ b/tests/unit/test_logger.py
@@ -937,6 +937,36 @@ def test_list_entries_limit(self):
             },
         )
 
+    def test_list_entries_folder(self):
+        from google.cloud.logging import TextEntry
+        from google.cloud.logging import Client
+
+        client = Client(
+            project=self.PROJECT, credentials=_make_credentials(), _use_grpc=False
+        )
+        FOLDER_ID = "123"
+        LOG_NAME = f"folders/{FOLDER_ID}/logs/cloudaudit.googleapis.com%2Fdata_access"
+
+        ENTRIES = [
+            {
+                "textPayload": "hello world",
+                "insertId": "1",
+                "resource": {"type": "global"},
+                "logName": LOG_NAME,
+            },
+        ]
+        returned = {"entries": ENTRIES}
+        client._connection = _Connection(returned)
+
+        iterator = client.list_entries(resource_names=[f"folders/{FOLDER_ID}"],)
+        entries = list(iterator)
+        # Check the entries.
+        self.assertEqual(len(entries), 1)
+        entry = entries[0]
+        self.assertIsInstance(entry, TextEntry)
+        self.assertIsNone(entry.logger)
+        self.assertEqual(entry.log_name, LOG_NAME)
+
 
 class TestBatch(unittest.TestCase):
 
From e0c5fc02160ae87faf4ba5c2b62be86de6b02cf3 Mon Sep 17 00:00:00 2001
From: Daniel Sanche
Date: Fri, 10 Dec 2021 14:59:31 -0800
Subject: [PATCH 31/34] feat: trace improvements (#450)

---
 google/cloud/logging_v2/handlers/_helpers.py  | 117 ++++++---
 .../cloud/logging_v2/handlers/app_engine.py   |   4 +-
 google/cloud/logging_v2/handlers/handlers.py  |  10 +-
 tests/system/test_system.py                   |   4 +
 tests/unit/handlers/test__helpers.py          | 223 +++++++++++++-----
 tests/unit/handlers/test_app_engine.py        |   6 +-
 tests/unit/handlers/test_handlers.py          |  71 +++++-
 tests/unit/handlers/test_structured_log.py    |  44 +++-
 8 files changed, 378 insertions(+), 101 deletions(-)

diff --git a/google/cloud/logging_v2/handlers/_helpers.py b/google/cloud/logging_v2/handlers/_helpers.py
index f5dfb7c55..32e70dfdd 100644
--- a/google/cloud/logging_v2/handlers/_helpers.py
+++ b/google/cloud/logging_v2/handlers/_helpers.py
@@ -27,11 +27,13 @@ from google.cloud.logging_v2.handlers.middleware.request import _get_django_request
 
 _DJANGO_CONTENT_LENGTH = "CONTENT_LENGTH"
-_DJANGO_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT"
+_DJANGO_XCLOUD_TRACE_HEADER = "HTTP_X_CLOUD_TRACE_CONTEXT"
+_DJANGO_TRACEPARENT = "HTTP_TRACEPARENT"
 _DJANGO_USERAGENT_HEADER = "HTTP_USER_AGENT"
 _DJANGO_REMOTE_ADDR_HEADER = "REMOTE_ADDR"
 _DJANGO_REFERER_HEADER = "HTTP_REFERER"
-_FLASK_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT"
+_FLASK_XCLOUD_TRACE_HEADER = "X_CLOUD_TRACE_CONTEXT"
+_FLASK_TRACEPARENT = "TRACEPARENT"
 _PROTOCOL_HEADER = "SERVER_PROTOCOL"
 
 
@@ -62,13 +64,12 @@ def get_request_data_from_flask():
     """Get http_request and trace data from flask request headers.
 
     Returns:
-        Tuple[Optional[dict], Optional[str], Optional[str]]:
-            Data related to the current http request, trace_id, and span_id for
-            the request. All fields will be None if a django request isn't
-            found.
+        Tuple[Optional[dict], Optional[str], Optional[str], bool]:
+            Data related to the current http request, trace_id, span_id and trace_sampled
+            for the request. All fields will be None if a flask request isn't found.
""" if flask is None or not flask.request: - return None, None, None + return None, None, None, False # build http_request http_request = { @@ -79,25 +80,29 @@ def get_request_data_from_flask(): } # find trace id and span id - header = flask.request.headers.get(_FLASK_TRACE_HEADER) - trace_id, span_id = _parse_trace_span(header) + # first check for w3c traceparent header + header = flask.request.headers.get(_FLASK_TRACEPARENT) + trace_id, span_id, trace_sampled = _parse_trace_parent(header) + if trace_id is None: + # traceparent not found. look for xcloud_trace_context header + header = flask.request.headers.get(_FLASK_XCLOUD_TRACE_HEADER) + trace_id, span_id, trace_sampled = _parse_xcloud_trace(header) - return http_request, trace_id, span_id + return http_request, trace_id, span_id, trace_sampled def get_request_data_from_django(): """Get http_request and trace data from django request headers. Returns: - Tuple[Optional[dict], Optional[str], Optional[str]]: - Data related to the current http request, trace_id, and span_id for - the request. All fields will be None if a django request isn't - found. + Tuple[Optional[dict], Optional[str], Optional[str], bool]: + Data related to the current http request, trace_id, span_id, and trace_sampled + for the request. All fields will be None if a django request isn't found. """ request = _get_django_request() if request is None: - return None, None, None + return None, None, None, False # build http_request http_request = { @@ -108,34 +113,75 @@ def get_request_data_from_django(): } # find trace id and span id - header = request.META.get(_DJANGO_TRACE_HEADER) - trace_id, span_id = _parse_trace_span(header) + # first check for w3c traceparent header + header = request.META.get(_DJANGO_TRACEPARENT) + trace_id, span_id, trace_sampled = _parse_trace_parent(header) + if trace_id is None: + # traceparent not found. look for xcloud_trace_context header + header = request.META.get(_DJANGO_XCLOUD_TRACE_HEADER) + trace_id, span_id, trace_sampled = _parse_xcloud_trace(header) - return http_request, trace_id, span_id + return http_request, trace_id, span_id, trace_sampled -def _parse_trace_span(header): +def _parse_trace_parent(header): + """Given a w3 traceparent header, extract the trace and span ids. + For more information see https://www.w3.org/TR/trace-context/ + + Args: + header (str): the string extracted from the traceparent header + example: 00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01 + Returns: + Tuple[Optional[dict], Optional[str], bool]: + The trace_id, span_id and trace_sampled extracted from the header + Each field will be None if header can't be parsed in expected format. + """ + trace_id = span_id = None + trace_sampled = False + # see https://www.w3.org/TR/trace-context/ for W3C traceparent format + if header: + try: + VERSION_PART = r"(?!ff)[a-f\d]{2}" + TRACE_ID_PART = r"(?![0]{32})[a-f\d]{32}" + PARENT_ID_PART = r"(?![0]{16})[a-f\d]{16}" + FLAGS_PART = r"[a-f\d]{2}" + regex = f"^\\s?({VERSION_PART})-({TRACE_ID_PART})-({PARENT_ID_PART})-({FLAGS_PART})(-.*)?\\s?$" + match = re.match(regex, header) + trace_id = match.group(2) + span_id = match.group(3) + # trace-flag component is an 8-bit bit field. Read as an int + int_flag = int(match.group(4), 16) + # trace sampled is set if the right-most bit in flag component is set + trace_sampled = bool(int_flag & 1) + except (IndexError, AttributeError): + # could not parse header as expected. 
Return None
+            pass
+    return trace_id, span_id, trace_sampled
+
+
+def _parse_xcloud_trace(header):
     """Given an X_CLOUD_TRACE header, extract the trace and span ids.
 
     Args:
         header (str): the string extracted from the X_CLOUD_TRACE header
     Returns:
-        Tuple[Optional[dict], Optional[str]]:
-            The trace_id and span_id extracted from the header
+        Tuple[Optional[str], Optional[str], bool]:
+            The trace_id, span_id and trace_sampled extracted from the header
             Each field will be None if not found.
     """
-    trace_id = None
-    span_id = None
+    trace_id = span_id = None
+    trace_sampled = False
+    # see https://cloud.google.com/trace/docs/setup for X-Cloud-Trace_Context format
     if header:
         try:
-            split_header = header.split("/", 1)
-            trace_id = split_header[0]
-            header_suffix = split_header[1]
-            # the span is the set of alphanumeric characters after the /
-            span_id = re.findall(r"^\w+", header_suffix)[0]
+            regex = r"([\w-]+)?(\/?([\w-]+))?(;?o=(\d))?"
+            match = re.match(regex, header)
+            trace_id = match.group(1)
+            span_id = match.group(3)
+            trace_sampled = match.group(5) == "1"
         except IndexError:
             pass
-    return trace_id, span_id
+    return trace_id, span_id, trace_sampled
 
 
 def get_request_data():
@@ -143,10 +189,9 @@
     frameworks (currently supported: Flask and Django).
 
     Returns:
-        Tuple[Optional[dict], Optional[str], Optional[str]]:
-            Data related to the current http request, trace_id, and span_id for
-            the request. All fields will be None if a django request isn't
-            found.
+        Tuple[Optional[dict], Optional[str], Optional[str], bool]:
+            Data related to the current http request, trace_id, span_id, and trace_sampled
+            for the request. All fields will be None if an HTTP request isn't found.
     """
     checkers = (
         get_request_data_from_django,
@@ -154,8 +199,8 @@
     )
 
     for checker in checkers:
-        http_request, trace_id, span_id = checker()
+        http_request, trace_id, span_id, trace_sampled = checker()
         if http_request is not None:
-            return http_request, trace_id, span_id
+            return http_request, trace_id, span_id, trace_sampled
 
-    return None, None, None
+    return None, None, None, False
diff --git a/google/cloud/logging_v2/handlers/app_engine.py b/google/cloud/logging_v2/handlers/app_engine.py
index abd16664f..a65d16a0e 100644
--- a/google/cloud/logging_v2/handlers/app_engine.py
+++ b/google/cloud/logging_v2/handlers/app_engine.py
@@ -98,7 +98,7 @@ def get_gae_labels(self):
         """
         gae_labels = {}
 
-        _, trace_id, _ = get_request_data()
+        _, trace_id, _, _ = get_request_data()
         if trace_id is not None:
             gae_labels[_TRACE_ID_LABEL] = trace_id
 
@@ -115,7 +115,7 @@ def emit(self, record):
             record (logging.LogRecord): The record to be logged.
""" message = super(AppEngineHandler, self).format(record) - inferred_http, inferred_trace, _ = get_request_data() + inferred_http, inferred_trace, _, _ = get_request_data() if inferred_trace is not None: inferred_trace = f"projects/{self.project_id}/traces/{inferred_trace}" # allow user overrides diff --git a/google/cloud/logging_v2/handlers/handlers.py b/google/cloud/logging_v2/handlers/handlers.py index 39bcbca79..769146007 100644 --- a/google/cloud/logging_v2/handlers/handlers.py +++ b/google/cloud/logging_v2/handlers/handlers.py @@ -82,7 +82,12 @@ def filter(self, record): """ user_labels = getattr(record, "labels", {}) # infer request data from the environment - inferred_http, inferred_trace, inferred_span = get_request_data() + ( + inferred_http, + inferred_trace, + inferred_span, + inferred_sampled, + ) = get_request_data() if inferred_trace is not None and self.project is not None: # add full path for detected trace inferred_trace = f"projects/{self.project}/traces/{inferred_trace}" @@ -90,6 +95,7 @@ def filter(self, record): record._resource = getattr(record, "resource", None) record._trace = getattr(record, "trace", inferred_trace) or None record._span_id = getattr(record, "span_id", inferred_span) or None + record._trace_sampled = bool(getattr(record, "trace_sampled", inferred_sampled)) record._http_request = getattr(record, "http_request", inferred_http) record._source_location = CloudLoggingFilter._infer_source_location(record) # add logger name as a label if possible @@ -98,6 +104,7 @@ def filter(self, record): # create string representations for structured logging record._trace_str = record._trace or "" record._span_id_str = record._span_id or "" + record._trace_sampled_str = "true" if record._trace_sampled else "false" record._http_request_str = json.dumps( record._http_request or {}, ensure_ascii=False ) @@ -205,6 +212,7 @@ def emit(self, record): labels=labels, trace=record._trace, span_id=record._span_id, + trace_sampled=record._trace_sampled, http_request=record._http_request, source_location=record._source_location, ) diff --git a/tests/system/test_system.py b/tests/system/test_system.py index 24050e8b3..90b4059d6 100644 --- a/tests/system/test_system.py +++ b/tests/system/test_system.py @@ -454,6 +454,7 @@ def test_log_empty(self): self.assertEqual(len(entries), 1) self.assertIsNone(entries[0].payload) + self.assertFalse(entries[0].trace_sampled) def test_log_struct_logentry_data(self): logger = Config.CLIENT.logger(self._logger_name("log_w_struct")) @@ -473,6 +474,7 @@ def test_log_struct_logentry_data(self): self.assertEqual(entries[0].severity, "WARNING") self.assertEqual(entries[0].trace, JSON_PAYLOAD["trace"]) self.assertEqual(entries[0].span_id, JSON_PAYLOAD["span_id"]) + self.assertFalse(entries[0].trace_sampled) def test_log_handler_async(self): LOG_MESSAGE = "It was the worst of times" @@ -534,6 +536,7 @@ def test_handlers_w_extras(self): extra = { "trace": "123", "span_id": "456", + "trace_sampled": True, "http_request": expected_request, "source_location": expected_source, "resource": Resource(type="cloudiot_device", labels={}), @@ -545,6 +548,7 @@ def test_handlers_w_extras(self): self.assertEqual(len(entries), 1) self.assertEqual(entries[0].trace, extra["trace"]) self.assertEqual(entries[0].span_id, extra["span_id"]) + self.assertTrue(entries[0].trace_sampled) self.assertEqual(entries[0].http_request, expected_request) self.assertEqual( entries[0].labels, {**extra["labels"], "python_logger": LOGGER_NAME} diff --git a/tests/unit/handlers/test__helpers.py 
b/tests/unit/handlers/test__helpers.py index d26e700e8..9946c8eb5 100644 --- a/tests/unit/handlers/test__helpers.py +++ b/tests/unit/handlers/test__helpers.py @@ -16,10 +16,10 @@ import mock -_FLASK_TRACE_ID = "flask-id" +_FLASK_TRACE_ID = "flask0id" _FLASK_SPAN_ID = "span0flask" _FLASK_HTTP_REQUEST = {"requestUrl": "https://flask.palletsprojects.com/en/1.1.x/"} -_DJANGO_TRACE_ID = "django-id" +_DJANGO_TRACE_ID = "django0id" _DJANGO_SPAN_ID = "span0django" _DJANGO_HTTP_REQUEST = {"requestUrl": "https://www.djangoproject.com/"} @@ -29,7 +29,8 @@ class Test_get_request_data_from_flask(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_flask() + http, trace, span, sampled = _helpers.get_request_data_from_flask() + return http, trace, span, sampled @staticmethod def create_app(): @@ -46,17 +47,18 @@ def index(): def test_no_context_header(self): app = self.create_app() with app.test_request_context(path="/", headers={}): - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertIsNone(trace_id) self.assertIsNone(span_id) + self.assertEqual(sampled, False) self.assertEqual(http_request["requestMethod"], "GET") - def test_valid_context_header(self): + def test_xcloud_header(self): flask_trace_header = "X_CLOUD_TRACE_CONTEXT" expected_trace_id = _FLASK_TRACE_ID expected_span_id = _FLASK_SPAN_ID - flask_trace_id = f"{expected_trace_id}/{expected_span_id}" + flask_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" app = self.create_app() context = app.test_request_context( @@ -64,10 +66,30 @@ def test_valid_context_header(self): ) with context: - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + flask_trace_header = "TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + flask_trace_id = f"00-{expected_trace_id}-{expected_span_id}-01" + + app = self.create_app() + context = app.test_request_context( + path="/", headers={flask_trace_header: flask_trace_id} + ) + + with context: + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -106,7 +128,8 @@ class Test_get_request_data_from_django(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data_from_django() + http, trace, span, sampled = _helpers.get_request_data_from_django() + return http, trace, span, sampled def setUp(self): from django.conf import settings @@ -131,20 +154,21 @@ def test_no_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(http_request["requestMethod"], "GET") self.assertIsNone(trace_id) self.assertIsNone(span_id) + self.assertEqual(sampled, False) - def test_valid_context_header(self): + def test_xcloud_header(self): from django.test import RequestFactory 
from google.cloud.logging_v2.handlers.middleware import request django_trace_header = "HTTP_X_CLOUD_TRACE_CONTEXT" expected_span_id = _DJANGO_SPAN_ID expected_trace_id = _DJANGO_TRACE_ID - django_trace_id = f"{expected_trace_id}/{expected_span_id}" + django_trace_id = f"{expected_trace_id}/{expected_span_id};o=1" django_request = RequestFactory().get( "/", **{django_trace_header: django_trace_id} @@ -152,10 +176,31 @@ def test_valid_context_header(self): middleware = request.RequestMiddleware(None) middleware.process_request(django_request) - http_request, trace_id, span_id = self._call_fut() + http_request, trace_id, span_id, sampled = self._call_fut() + + self.assertEqual(trace_id, expected_trace_id) + self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) + self.assertEqual(http_request["requestMethod"], "GET") + + def test_traceparent_header(self): + from django.test import RequestFactory + from google.cloud.logging_v2.handlers.middleware import request + + django_trace_header = "HTTP_TRACEPARENT" + expected_trace_id = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span_id = "00f067aa0ba902b7" + header = f"00-{expected_trace_id}-{expected_span_id}-01" + + django_request = RequestFactory().get("/", **{django_trace_header: header}) + + middleware = request.RequestMiddleware(None) + middleware.process_request(django_request) + http_request, trace_id, span_id, sampled = self._call_fut() self.assertEqual(trace_id, expected_trace_id) self.assertEqual(span_id, expected_span_id) + self.assertEqual(sampled, True) self.assertEqual(http_request["requestMethod"], "GET") def test_http_request_populated(self): @@ -203,7 +248,8 @@ class Test_get_request_data(unittest.TestCase): def _call_fut(): from google.cloud.logging_v2.handlers import _helpers - return _helpers.get_request_data() + http, trace, span, sampled = _helpers.get_request_data() + return http, trace, span, sampled def _helper(self, django_return, flask_return): django_patch = mock.patch( @@ -222,8 +268,13 @@ def _helper(self, django_return, flask_return): return django_mock, flask_mock, result def test_from_django(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) - flask_expected = (None, None, None) + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (None, None, None, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, django_expected) @@ -231,8 +282,8 @@ def test_from_django(self): flask_mock.assert_not_called() def test_from_flask(self): - django_expected = (None, None, None) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = (None, None, None, False) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) self.assertEqual(output, flask_expected) @@ -241,8 +292,13 @@ def test_from_flask(self): flask_mock.assert_called_once_with() def test_from_django_and_flask(self): - django_expected = (_DJANGO_HTTP_REQUEST, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) - flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID) + django_expected = ( + _DJANGO_HTTP_REQUEST, + _DJANGO_TRACE_ID, + _DJANGO_SPAN_ID, + False, + ) + flask_expected = (_FLASK_HTTP_REQUEST, _FLASK_TRACE_ID, _FLASK_SPAN_ID, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) @@ -253,19 +309,19 @@ def 
test_from_django_and_flask(self): flask_mock.assert_not_called() def test_missing_http_request(self): - flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID) - django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID) + flask_expected = (None, _FLASK_TRACE_ID, _FLASK_SPAN_ID, True) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_TRACE_ID, True) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # function only returns trace if http_request data is present - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_missing_trace_id(self): - flask_expected = (_FLASK_HTTP_REQUEST, None, None) - django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID) + flask_expected = (_FLASK_HTTP_REQUEST, None, None, False) + django_expected = (None, _DJANGO_TRACE_ID, _DJANGO_SPAN_ID, True) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) # trace_id is optional @@ -275,77 +331,136 @@ def test_missing_trace_id(self): flask_mock.assert_called_once_with() def test_missing_both(self): - flask_expected = (None, None, None) - django_expected = (None, None, None) + flask_expected = (None, None, None, False) + django_expected = (None, None, None, False) django_mock, flask_mock, output = self._helper(django_expected, flask_expected) - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) django_mock.assert_called_once_with() flask_mock.assert_called_once_with() def test_wo_libraries(self): output = self._call_fut() - self.assertEqual(output, (None, None, None)) + self.assertEqual(output, (None, None, None, False)) -class Test__parse_trace_span(unittest.TestCase): +class Test__parse_xcloud_trace(unittest.TestCase): @staticmethod def _call_fut(header): from google.cloud.logging_v2.handlers import _helpers - return _helpers._parse_trace_span(header) + trace, span, sampled = _helpers._parse_xcloud_trace(header) + return trace, span, sampled def test_empty_header(self): header = "" - trace_id, span_id = self._call_fut(header) - self.assertEqual(trace_id, None) - self.assertEqual(span_id, None) + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) def test_no_span(self): header = "12345" - trace_id, span_id = self._call_fut(header) + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, header) - self.assertEqual(span_id, None) + self.assertIsNone(span_id) + self.assertEqual(sampled, False) def test_no_trace(self): header = "/12345" - trace_id, span_id = self._call_fut(header) - self.assertEqual(trace_id, "") + trace_id, span_id, sampled = self._call_fut(header) + self.assertIsNone(trace_id) self.assertEqual(span_id, "12345") + self.assertEqual(sampled, False) def test_with_span(self): expected_trace = "12345" expected_span = "67890" header = f"{expected_trace}/{expected_span}" - trace_id, span_id = self._call_fut(header) + trace_id, span_id, sampled = self._call_fut(header) self.assertEqual(trace_id, expected_trace) self.assertEqual(span_id, expected_span) + self.assertEqual(sampled, False) def test_with_extra_characters(self): expected_trace = "12345" expected_span = "67890" - header = f"{expected_trace}/{expected_span};o=0" - trace_id, span_id = self._call_fut(header) + header = f"{expected_trace}/{expected_span};abc" + trace_id, span_id, sampled = 
self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
         self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, False)
 
-    def test_with_unicode_span(self):
-        """
-        Spans are expected to be alphanumeric
-        """
+    def test_with_explicit_no_sampled(self):
         expected_trace = "12345"
-        header = f"{expected_trace}/😀123"
-        trace_id, span_id = self._call_fut(header)
+        expected_span = "67890"
+        header = f"{expected_trace}/{expected_span};o=0"
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
-        self.assertEqual(span_id, None)
+        self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, False)
 
-    def test_with_unicode_trace(self):
-        """
-        Spans are expected to be alphanumeric
-        """
-        expected_trace = "12😀345"
+    def test_with_sampled(self):
+        expected_trace = "12345"
         expected_span = "67890"
-        header = f"{expected_trace}/{expected_span}"
-        trace_id, span_id = self._call_fut(header)
+        header = f"{expected_trace}/{expected_span};o=1"
+        trace_id, span_id, sampled = self._call_fut(header)
         self.assertEqual(trace_id, expected_trace)
         self.assertEqual(span_id, expected_span)
+        self.assertEqual(sampled, True)
+
+
+class Test__parse_trace_parent(unittest.TestCase):
+    @staticmethod
+    def _call_fut(header):
+        from google.cloud.logging_v2.handlers import _helpers
+
+        trace, span, sampled = _helpers._parse_trace_parent(header)
+        return trace, span, sampled
+
+    def test_empty_header(self):
+        header = ""
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertIsNone(trace_id)
+        self.assertIsNone(span_id)
+        self.assertEqual(sampled, False)
+
+    def test_valid_header(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, True)
+
+    def test_not_sampled(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, False)
+
+    def test_sampled_w_other_flags(self):
+        header = "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-09"
+        trace_id, span_id, sampled = self._call_fut(header)
+        self.assertEqual(trace_id, "0af7651916cd43dd8448eb211c80319c")
+        self.assertEqual(span_id, "b7ad6b7169203331")
+        self.assertEqual(sampled, True)
+
+    def test_invalid_headers(self):
+        invalid_headers = [
+            "",
+            "test",
+            "ff-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-01",  # invalid version
+            "00-00000000000000000000000000000000-b7ad6b7169203331-01",  # invalid trace
+            "00-0af7651916cd43dd8448eb211c80319c-0000000000000000-01",  # invalid span
+            "00-af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-00",
+            "00-0af7651916cd43dd8448eb211c80319c-bad6b7169203331-00",
+            "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-0",
+            "00-0af7651916cd43dd8448eb211c80319c-b7ad6b7169203331-",
+            "00-0af7651916cd43dd8448eb211c80319c-00",
+        ]
+        for header in invalid_headers:
+            trace_id, span_id, sampled = self._call_fut(header)
+            self.assertIsNone(trace_id)
+            self.assertIsNone(span_id)
+            self.assertEqual(sampled, False)
diff --git a/tests/unit/handlers/test_app_engine.py b/tests/unit/handlers/test_app_engine.py
index c726c8496..8eedfad9b 100644
--- a/tests/unit/handlers/test_app_engine.py
+++ b/tests/unit/handlers/test_app_engine.py
@@ -97,7 +97,7 @@ def
test_emit(self): expected_trace_id = f"projects/{self.PROJECT}/traces/{trace_id}" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(expected_http_request, trace_id, None), + return_value=(expected_http_request, trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -135,7 +135,7 @@ def test_emit_manual_field_override(self): inferred_trace_id = "trace-test" get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(inferred_http_request, inferred_trace_id, None), + return_value=(inferred_http_request, inferred_trace_id, None, None), ) with get_request_patch: # library integrations mocked to return test data @@ -180,7 +180,7 @@ def test_emit_manual_field_override(self): def _get_gae_labels_helper(self, trace_id): get_request_patch = mock.patch( "google.cloud.logging_v2.handlers.app_engine.get_request_data", - return_value=(None, trace_id, None), + return_value=(None, trace_id, None, None), ) client = mock.Mock(project=self.PROJECT, spec=["project"]) diff --git a/tests/unit/handlers/test_handlers.py b/tests/unit/handlers/test_handlers.py index 71a709b6a..bbfacf59f 100644 --- a/tests/unit/handlers/test_handlers.py +++ b/tests/unit/handlers/test_handlers.py @@ -84,6 +84,8 @@ def test_filter_record(self): self.assertIsNone(record._resource) self.assertIsNone(record._trace) self.assertEqual(record._trace_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") self.assertIsNone(record._http_request) @@ -112,6 +114,8 @@ def test_minimal_record(self): self.assertEqual(record._trace_str, "") self.assertIsNone(record._span_id) self.assertEqual(record._span_id_str, "") + self.assertFalse(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "false") self.assertIsNone(record._http_request) self.assertEqual(record._http_request_str, "{}") self.assertIsNone(record._labels) @@ -131,7 +135,7 @@ def test_record_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - combined_trace = f"{expected_trace}/{expected_span}" + combined_trace = f"{expected_trace}/{expected_span};o=1" expected_request = { "requestMethod": "GET", "requestUrl": expected_path, @@ -154,6 +158,47 @@ def test_record_with_request(self): self.assertEqual(record._trace_str, expected_trace) self.assertEqual(record._span_id, expected_span) self.assertEqual(record._span_id_str, expected_span) + self.assertTrue(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "true") + self.assertEqual(record._http_request, expected_request) + self.assertEqual(record._http_request_str, json.dumps(expected_request)) + + def test_record_with_traceparent_request(self): + """ + test filter adds http request data when available + """ + import logging + + filter_obj = self._make_one() + record = logging.LogRecord(None, logging.INFO, None, None, None, None, None,) + record.created = None + + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + combined_trace = f"00-{expected_trace}-{expected_span}-03" + expected_request = { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + } + + app = self.create_app() + with app.test_request_context( 
+ expected_path, + headers={"User-Agent": expected_agent, "TRACEPARENT": combined_trace}, + ): + success = filter_obj.filter(record) + self.assertTrue(success) + + self.assertEqual(record._trace, expected_trace) + self.assertEqual(record._trace_str, expected_trace) + self.assertEqual(record._span_id, expected_span) + self.assertEqual(record._span_id_str, expected_span) + self.assertTrue(record._trace_sampled) + self.assertEqual(record._trace_sampled_str, "true") self.assertEqual(record._http_request, expected_request) self.assertEqual(record._http_request_str, json.dumps(expected_request)) @@ -306,6 +351,7 @@ def test_emit(self): {"python_logger": logname}, None, None, + False, None, None, ), @@ -322,7 +368,7 @@ def test_emit_minimal(self): handler.handle(record) self.assertEqual( handler.transport.send_called_with, - (record, None, _GLOBAL_RESOURCE, None, None, None, None, None,), + (record, None, _GLOBAL_RESOURCE, None, None, None, False, None, None,), ) def test_emit_manual_field_override(self): @@ -350,6 +396,8 @@ def test_emit_manual_field_override(self): setattr(record, "trace", expected_trace) expected_span = "456" setattr(record, "span_id", expected_span) + expected_sampled = True + setattr(record, "trace_sampled", expected_sampled) expected_http = {"reuqest_url": "manual"} setattr(record, "http_request", expected_http) expected_source = {"file": "test-file"} @@ -375,6 +423,7 @@ def test_emit_manual_field_override(self): expected_labels, expected_trace, expected_span, + expected_sampled, expected_http, expected_source, ), @@ -410,6 +459,7 @@ def test_emit_with_custom_formatter(self): expected_label, None, None, + False, None, None, ), @@ -442,6 +492,7 @@ def test_emit_dict(self): expected_label, None, None, + False, None, None, ), @@ -476,6 +527,7 @@ def test_emit_w_json_extras(self): expected_label, None, None, + False, None, None, ), @@ -508,6 +560,7 @@ def test_emit_with_encoded_json(self): expected_label, None, None, + False, None, None, ), @@ -533,7 +586,17 @@ def test_format_with_arguments(self): self.assertEqual( handler.transport.send_called_with, - (record, expected_result, _GLOBAL_RESOURCE, None, None, None, None, None,), + ( + record, + expected_result, + _GLOBAL_RESOURCE, + None, + None, + None, + False, + None, + None, + ), ) @@ -809,6 +872,7 @@ def send( labels=None, trace=None, span_id=None, + trace_sampled=None, http_request=None, source_location=None, ): @@ -819,6 +883,7 @@ def send( labels, trace, span_id, + trace_sampled, http_request, source_location, ) diff --git a/tests/unit/handlers/test_structured_log.py b/tests/unit/handlers/test_structured_log.py index 08e4c2906..5db098c29 100644 --- a/tests/unit/handlers/test_structured_log.py +++ b/tests/unit/handlers/test_structured_log.py @@ -66,6 +66,7 @@ def test_format(self): "severity": record.levelname, "logging.googleapis.com/trace": "", "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": { "file": pathname, "line": lineno, @@ -95,6 +96,7 @@ def test_format_minimal(self): "severity": "INFO", "logging.googleapis.com/trace": "", "logging.googleapis.com/spanId": "", + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {}, "httpRequest": {}, "logging.googleapis.com/labels": {}, @@ -242,10 +244,11 @@ def test_format_with_request(self): expected_agent = "Mozilla/5.0" expected_trace = "123" expected_span = "456" - trace_header = f"{expected_trace}/{expected_span};o=0" + trace_header = 
f"{expected_trace}/{expected_span};o=1" expected_payload = { "logging.googleapis.com/trace": expected_trace, "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, "httpRequest": { "requestMethod": "GET", "requestUrl": expected_path, @@ -267,6 +270,41 @@ def test_format_with_request(self): for (key, value) in expected_payload.items(): self.assertEqual(value, result[key]) + def test_format_with_traceparent(self): + import logging + import json + + handler = self._make_one() + logname = "loggername" + message = "hello world,嗨 世界" + record = logging.LogRecord(logname, logging.INFO, "", 0, message, None, None) + expected_path = "http://testserver/123" + expected_agent = "Mozilla/5.0" + expected_trace = "4bf92f3577b34da6a3ce929d0e0e4736" + expected_span = "00f067aa0ba902b7" + trace_header = f"00-{expected_trace}-{expected_span}-09" + expected_payload = { + "logging.googleapis.com/trace": expected_trace, + "logging.googleapis.com/spanId": expected_span, + "logging.googleapis.com/trace_sampled": True, + "httpRequest": { + "requestMethod": "GET", + "requestUrl": expected_path, + "userAgent": expected_agent, + "protocol": "HTTP/1.1", + }, + } + + app = self.create_app() + with app.test_request_context( + expected_path, + headers={"User-Agent": expected_agent, "TRACEPARENT": trace_header}, + ): + handler.filter(record) + result = json.loads(handler.format(record)) + for (key, value) in expected_payload.items(): + self.assertEqual(value, result[key]) + def test_format_overrides(self): """ Allow users to override log fields using `logging.info("", extra={})` @@ -289,17 +327,19 @@ def test_format_overrides(self): inferred_path = "http://testserver/123" overwrite_trace = "abc" overwrite_span = "def" - inferred_trace_span = "123/456;" + inferred_trace_span = "123/456;o=1" overwrite_file = "test-file" record.http_request = {"requestUrl": overwrite_path} record.source_location = {"file": overwrite_file} record.trace = overwrite_trace record.span_id = overwrite_span + record.trace_sampled = False added_labels = {"added_key": "added_value", "overwritten_key": "new_value"} record.labels = added_labels expected_payload = { "logging.googleapis.com/trace": overwrite_trace, "logging.googleapis.com/spanId": overwrite_span, + "logging.googleapis.com/trace_sampled": False, "logging.googleapis.com/sourceLocation": {"file": overwrite_file}, "httpRequest": {"requestUrl": overwrite_path}, "logging.googleapis.com/labels": { From 8a67b73cdfcb9da545671be6cf59c724360b1544 Mon Sep 17 00:00:00 2001 From: Daniel Sanche Date: Wed, 26 Jan 2022 14:16:17 -0800 Subject: [PATCH 32/34] docs: update usage guide for v3.0.0 (#456) --- UPGRADING.md | 130 ++++++++++- docs/direct-lib-usage.rst | 330 ++++++++++++++++++++++++++ docs/entries.rst | 4 +- docs/grpc-vs-http.rst | 14 ++ docs/handlers-app-engine.rst | 7 +- docs/handlers-cloud-logging.rst | 6 + docs/handlers-container-engine.rst | 7 +- docs/handlers-structured-log.rst | 6 + docs/handlers.rst | 13 +- docs/index.rst | 22 +- docs/logger.rst | 1 + docs/std-lib-integration.rst | 146 ++++++++++++ docs/stdlib-usage.rst | 70 ------ docs/transport.rst | 25 ++ docs/transports-base.rst | 6 - docs/transports-sync.rst | 6 - docs/transports-thread.rst | 7 - docs/usage.rst | 359 +---------------------------- docs/v2.rst | 19 -- google/cloud/logging_v2/logger.py | 11 +- samples/snippets/usage_guide.py | 109 ++++++++- 21 files changed, 808 insertions(+), 490 deletions(-) create mode 100644 docs/direct-lib-usage.rst create mode 100644 docs/grpc-vs-http.rst 
create mode 100644 docs/handlers-cloud-logging.rst
 create mode 100644 docs/handlers-structured-log.rst
 create mode 100644 docs/std-lib-integration.rst
 delete mode 100644 docs/stdlib-usage.rst
 create mode 100644 docs/transport.rst
 delete mode 100644 docs/transports-base.rst
 delete mode 100644 docs/transports-sync.rst
 delete mode 100644 docs/transports-thread.rst
 delete mode 100644 docs/v2.rst

diff --git a/UPGRADING.md b/UPGRADING.md
index af7461dda..e882a497b 100644
--- a/UPGRADING.md
+++ b/UPGRADING.md
@@ -1,3 +1,131 @@
+# 3.0.0 Migration Guide
+
+The v3.0.0 release of `google-cloud-logging` improves usability of the library,
+particularly in serverless environments.
+
+If you experience technical issues or have questions, please file an [issue](https://github.com/googleapis/python-logging/issues).
+
+## Primary Changes
+
+### Handler deprecations ([#310](https://github.com/googleapis/python-logging/pull/310))
+
+> **WARNING**: Breaking change
+
+We have changed our design policy to support more generic `Handler` classes instead of product-specific classes:
+
+- [`CloudLoggingHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/handlers.py)
+  - Sends logs over the network (using gRPC or HTTP API calls)
+  - Replaces `AppEngineHandler`
+- [`StructuredLogHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/structured_log.py)
+  - Exports logs in JSON format through standard out, to be parsed by an agent
+  - Replaces `ContainerEngineHandler`
+
+As of v3.0.0, [`AppEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/app_engine.py)
+and [`ContainerEngineHandler`](https://github.com/googleapis/python-logging/blob/v2.7.0/google/cloud/logging_v2/handlers/container_engine.py)
+are deprecated and won't be updated. These handlers might be removed from the library in a future update.
+
+### Full JSON log support in standard library integration ([#316](https://github.com/googleapis/python-logging/pull/316), [#339](https://github.com/googleapis/python-logging/pull/339), [#447](https://github.com/googleapis/python-logging/pull/447))
+
+You can now log JSON data using the Python `logging` standard library integration.
+To log JSON data, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+```py
+import logging
+
+data_dict = {"hello": "world"}
+logging.info("message field", extra={"json_fields": data_dict})
+```
+
+2. Log a JSON-parsable string:
+
+```py
+import logging
+import json
+
+data_dict = {"hello": "world"}
+logging.info(json.dumps(data_dict))
+```
+
+### Metadata autodetection ([#315](https://github.com/googleapis/python-logging/pull/315))
+
+> **WARNING**: Breaking change
+
+Logs emitted by the library must be associated with a [monitored-resource type](https://cloud.google.com/monitoring/api/resources)
+that indicates the compute environment the log originated from.
+- Prior to 3.0.0, when a log doesn't specify a monitored resource, that field is set to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
+- With 3.0.0, when a log doesn't specify a monitored resource, the library attempts to identify the resource. If a resource can't be detected, the field will still default to ["global"](https://cloud.google.com/monitoring/api/resources#tag_global).
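For a concrete view of the autodetection described above, here is an illustrative sketch built on `detect_resource`, the internal helper the unit tests in this series import; it is not public API and may change:

```py
from google.cloud.logging_v2.handlers._monitored_resources import detect_resource

# "my-project-id" is a placeholder; detect_resource returns the Resource
# the library would attach by default in the current environment.
resource = detect_resource("my-project-id")
print(resource._to_dict())
# On a local machine this is {"type": "global", "labels": {"project_id": "my-project-id"}};
# on GCE, GKE, GAE, or Cloud Functions the matching resource type is returned instead.
```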
+
+### New `Logger.log` method ([#316](https://github.com/googleapis/python-logging/pull/316))
+
+In v3.0.0, the library adds a generic `log()` method that will attempt to infer and log any type:
+
+```py
+logger.log("hello world")
+```
+
+v3.0.0 continues to support the type-specific logging methods from previous releases:
+
+```py
+logger.log_text("hello world")
+logger.log_struct({"hello": "world"})
+logger.log_proto(proto_message)
+logger.log_empty()
+```
+
+### More permissive arguments ([#422](https://github.com/googleapis/python-logging/pull/422))
+
+> **WARNING**: Breaking change
+
+In v3.0.0, the library supports a wider variety of input formats:
+
+```py
+# lowercase severity strings will be accepted
+logger.log("hello world", severity="warning")
+```
+
+```py
+# a severity will be pulled out of the JSON payload if not otherwise set
+logger.log({"hello": "world", "severity":"warning"})
+```
+
+```py
+# resource data can be passed as a dict instead of a Resource object
+logger.log("hello world", resource={"type":"global", "labels":[]})
+```
+
+### Allow reading from non-project resources ([#444](https://github.com/googleapis/python-logging/pull/444))
+
+Prior to v3.0.0, there was a crashing bug when attempting to read logs from non-project resources:
+
+- `organizations/[ORGANIZATION_ID]/logs/[LOG_ID]`
+- `billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]`
+- `folders/[FOLDER_ID]/logs/[LOG_ID]`
+
+The v3.0.0 update fixes this issue.
+
+### Internal Gapic and HTTP implementation changes ([#375](https://github.com/googleapis/python-logging/pull/375))
+
+> **WARNING**: Breaking change
+
+The library supports sending logs using two network protocols: gRPC and HTTP. Prior to v3.0.0, there was an
+inconsistency in the implementations, resulting in unexpected behavior when in HTTP mode.
+
+### `max_size` argument when listing entries ([#375](https://github.com/googleapis/python-logging/pull/375))
+
+v3.0.0 introduces a new `max_size` argument to `list_entries` calls, which can be used to specify an upper bound
+on how many logs should be returned:
+
+```py
+from google.cloud import logging_v2
+
+client = logging_v2.Client()
+client.list_entries(max_size=5)
+```
+
+---
+
 # 2.0.0 Migration Guide
 
 The 2.0 release of the `google-cloud-logging` client is a significant upgrade based on a [next-gen code generator](https://github.com/googleapis/gapic-generator-python), and includes substantial interface changes. Existing code written for earlier versions of this library will likely require updates to use this version. This document describes the changes that have been made, and what you need to do to update your usage.
@@ -334,4 +462,4 @@ The following resource name helpers have been renamed.
 
 **`ConfigServiceV2Client`**
 
 * `sink_path` -> `log_sink_path`
-* `exclusion_path` -> `log_exclusion_path`
\ No newline at end of file
+* `exclusion_path` -> `log_exclusion_path`
diff --git a/docs/direct-lib-usage.rst b/docs/direct-lib-usage.rst
new file mode 100644
index 000000000..11cf39e9c
--- /dev/null
+++ b/docs/direct-lib-usage.rst
@@ -0,0 +1,330 @@
+Direct Library Usage
+====================
+
+We recommend that you use the :mod:`google-cloud-logging` library
+by integrating it with the :doc:`Python logging standard library`;
+however, you can also use the library to interact with the Google Cloud Logging API
+directly.
+
+In addition to writing logs, you can use the library to manage
+:doc:`logs`, :doc:`sinks`, :doc:`metrics`, and other resources.
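Before the usage guide continues, here is a minimal sketch of the non-project read path fixed earlier in this series (the folder ID is a placeholder, and the caller is assumed to have read access to the folder's logs):

```py
import google.cloud.logging

client = google.cloud.logging.Client()

# Folder-scoped log names can't be parsed into a project logger, so with
# this series applied, entries come back with entry.logger set to None
# instead of raising ValueError.
for entry in client.list_entries(resource_names=["folders/123456"]):
    print(entry.log_name, entry.payload)
```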
+
+Setup
+----------------------------
+
+Create a Client
+~~~~~~~~~~~~~~~~~
+
+.. _Creating Client:
+
+You must set up a :doc:`Client` to use the library:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START usage_client_setup]
+   :end-before: [END usage_client_setup]
+   :dedent: 4
+
+To use HTTP, :doc:`disable gRPC` when you set up the :doc:`Client`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START usage_http_client_setup]
+   :end-before: [END usage_http_client_setup]
+   :dedent: 4
+
+Create a Logger
+~~~~~~~~~~~~~~~~~
+
+Loggers read, write, and delete logs from Google Cloud.
+
+You use your :doc:`Client` to create a :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_create]
+   :end-before: [END logger_create]
+   :dedent: 4
+
+You can attach custom labels when you initialize a :doc:`Logger`.
+These labels are then added to each
+:doc:`LogEntry` written by the :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_custom_labels]
+   :end-before: [END logger_custom_labels]
+   :dedent: 4
+
+By default, the library adds a `Monitored Resource field `_
+associated with the environment the code is run on. For example, code run on
+App Engine will have a `gae_app `_
+resource, while code run locally will have a `global `_ resource field.
+
+To manually set the resource field, do so when you initialize the :doc:`Logger`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_custom_resource]
+   :end-before: [END logger_custom_resource]
+   :dedent: 4
+
+
+Write Log Entries
+-------------------
+
+You write logs by using :meth:`Logger.log `:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_log_basic]
+   :end-before: [END logger_log_basic]
+   :dedent: 4
+
+You can add `LogEntry fields `_
+by passing them as keyword arguments:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_log_fields]
+   :end-before: [END logger_log_fields]
+   :dedent: 4
+
+:meth:`Logger.log ` chooses the appropriate :doc:`LogEntry ` type
+based on input type. To specify type, you can use the following Logger methods:
+
+- :meth:`Logger.log_text ` creates a :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :meth:`Logger.log_struct ` creates a :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :meth:`Logger.log_proto ` creates a :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+- :meth:`Logger.log_empty ` creates an empty :class:`~google.cloud.logging_v2.entries.LogEntry`
+
+Batch Write Logs
+------------------
+
+By default, each log write takes place in an individual network request, which may be inefficient at scale.
+
+With the :class:`~google.cloud.logging_v2.logger.Batch` class, logs are batched together and only sent out
+when :func:`batch.commit ` is called.
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_log_batch]
+   :end-before: [END logger_log_batch]
+   :dedent: 4
+
+To simplify things, you can also use :class:`~google.cloud.logging_v2.logger.Batch` as a context manager:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_log_batch_context]
+   :end-before: [END logger_log_batch_context]
+   :dedent: 4
+
+In the previous example, the logs are automatically committed when the code exits the "with" block.
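+
+For reference, the batching pattern in the snippets above looks roughly like this
+(a sketch that assumes ``logger`` was created as shown earlier):
+
+.. code-block:: python
+
+   # explicit commit: nothing is sent until commit() is called
+   batch = logger.batch()
+   batch.log("first log")
+   batch.log("second log")
+   batch.commit()
+
+   # as a context manager: logs are committed when the block exits
+   with logger.batch() as batch:
+       batch.log("first log")
+       batch.log("last log")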
+
+Retrieve Log Entries
+---------------------
+
+You retrieve log entries for the default project using
+:meth:`list_entries() `
+on a :doc:`Client` or :doc:`Logger` object:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START client_list_entries_default]
+   :end-before: [END client_list_entries_default]
+   :dedent: 4
+
+Entries returned by
+:meth:`Client.list_entries() `
+or
+:meth:`Logger.list_entries() `
+are instances of one of the following classes:
+
+- :class:`~google.cloud.logging_v2.entries.TextEntry`
+- :class:`~google.cloud.logging_v2.entries.StructEntry`
+- :class:`~google.cloud.logging_v2.entries.ProtobufEntry`
+
+You can filter the entries you retrieve by using the `Advanced Logs Filters`_ syntax.
+
+.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters
+
+To fetch filtered entries for the default project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START client_list_entries_filter]
+   :end-before: [END client_list_entries_filter]
+   :dedent: 4
+
+To sort entries in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START client_list_entries_order_by]
+   :end-before: [END client_list_entries_order_by]
+   :dedent: 4
+
+To retrieve entries for a single logger, sorting in descending timestamp order:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_list_entries]
+   :end-before: [END logger_list_entries]
+   :dedent: 4
+
+For example, to retrieve all `GKE Admin Activity audit logs`_
+from the past 24 hours:
+
+.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logging_list_gke_audit_logs]
+   :end-before: [END logging_list_gke_audit_logs]
+   :dedent: 4
+
+
+Delete Log Entries
+--------------------
+
+To delete all logs associated with a logger, use the following call:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logger_delete]
+   :end-before: [END logger_delete]
+   :dedent: 8
+
+
+Manage Log Metrics
+--------------------
+
+Logs-based metrics are counters of entries that match a given filter.
+They can be used within Cloud Monitoring to create charts and alerts.
+
+To list all logs-based metrics for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START client_list_metrics]
+   :end-before: [END client_list_metrics]
+   :dedent: 4
+
+To create a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START metric_create]
+   :end-before: [END metric_create]
+   :dedent: 4
+
+To refresh local information about a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START metric_reload]
+   :end-before: [END metric_reload]
+   :dedent: 4
+
+To update a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START metric_update]
+   :end-before: [END metric_update]
+   :dedent: 4
+
+To delete a logs-based metric:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START metric_delete]
+   :end-before: [END metric_delete]
+   :dedent: 4
+
+Log Sinks
+---------------
+
+Sinks allow you to export log entries that match a given filter to
+Cloud Storage buckets, BigQuery datasets, or Cloud Pub/Sub topics.
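+
+Whatever the destination, a sink pairs a filter with a destination URI. As an
+illustrative sketch (the sink name, filter, and bucket are placeholders, and the
+destination permissions described below must already be configured):
+
+.. code-block:: python
+
+   # export all ERROR-and-above entries to a Cloud Storage bucket
+   sink = client.sink(
+       "error-sink",
+       filter_="severity>=ERROR",
+       destination="storage.googleapis.com/my-bucket",
+   )
+   sink.create()  # API call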
+
+Cloud Storage Sink
+~~~~~~~~~~~~~~~~~~~~~~~
+
+The storage bucket that you want to export logs to must have
+``cloud-logs@google.com`` as an owner. See
+`Setting permissions for Cloud Storage`_.
+
+.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage
+
+To add ``cloud-logs@google.com`` as an owner of the bucket:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_bucket_permissions]
+   :end-before: [END sink_bucket_permissions]
+   :dedent: 4
+
+To create a Cloud Storage sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_storage_create]
+   :end-before: [END sink_storage_create]
+   :dedent: 4
+
+
+BigQuery Sink
+~~~~~~~~~~~~~~~~~~
+
+To export logs to BigQuery, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a dataset.
+
+See: `Setting permissions for BigQuery`_
+
+.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_dataset_permissions]
+   :end-before: [END sink_dataset_permissions]
+   :dedent: 4
+
+To create a BigQuery sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_bigquery_create]
+   :end-before: [END sink_bigquery_create]
+   :dedent: 4
+
+
+Pub/Sub Sink
+~~~~~~~~~~~~~~~~~
+
+To export logs to Pub/Sub, you must log into the Cloud Console
+and add ``cloud-logs@google.com`` to a topic.
+
+See: `Setting permissions for Pub/Sub`_
+
+.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_topic_permissions]
+   :end-before: [END sink_topic_permissions]
+   :dedent: 4
+
+To create a Cloud Pub/Sub sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_pubsub_create]
+   :end-before: [END sink_pubsub_create]
+   :dedent: 4
+
+Manage Sinks
+~~~~~~~~~~~~~~
+
+To list all sinks for a project:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START client_list_sinks]
+   :end-before: [END client_list_sinks]
+   :dedent: 4
+
+To refresh local information about a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_reload]
+   :end-before: [END sink_reload]
+   :dedent: 4
+
+To update a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_update]
+   :end-before: [END sink_update]
+   :dedent: 4
+
+To delete a sink:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START sink_delete]
+   :end-before: [END sink_delete]
+   :dedent: 4
diff --git a/docs/entries.rst b/docs/entries.rst
index 9d473f3c1..dc257e4c9 100644
--- a/docs/entries.rst
+++ b/docs/entries.rst
@@ -1,5 +1,5 @@
-Entries
-=======
+Log Entries
+===========
 
 .. automodule:: google.cloud.logging_v2.entries
    :members:
diff --git a/docs/grpc-vs-http.rst b/docs/grpc-vs-http.rst
new file mode 100644
index 000000000..e6891420c
--- /dev/null
+++ b/docs/grpc-vs-http.rst
@@ -0,0 +1,14 @@
+gRPC vs HTTP
+====================
+
+:mod:`google-cloud-logging` supports two different protocols for sending logs over the network:
+gRPC and HTTP. Both implementations conform to the same API, and the difference should be
+invisible to the end user.
+
+gRPC is enabled by default. 
You can switch to HTTP mode by either: + +- setting the `DISABLE_GRPC` environment variable to `TRUE` +- or, passing `_use_grpc=False` when :ref:`initializing a Client` + +We recommend using gRPC whenever possible, but you may want to try the HTTP +implementation if you have network issues when using gRPC. diff --git a/docs/handlers-app-engine.rst b/docs/handlers-app-engine.rst index f25223a20..9f8a6c8db 100644 --- a/docs/handlers-app-engine.rst +++ b/docs/handlers-app-engine.rst @@ -1,5 +1,8 @@ -Google App Engine flexible Log Handler -====================================== +[DEPRECATED] App Engine Handler +=================================================== + +.. deprecated:: 3.0.0 + Use :class:`CloudLoggingHandler` instead. .. automodule:: google.cloud.logging_v2.handlers.app_engine :members: diff --git a/docs/handlers-cloud-logging.rst b/docs/handlers-cloud-logging.rst new file mode 100644 index 000000000..5ebaa51ff --- /dev/null +++ b/docs/handlers-cloud-logging.rst @@ -0,0 +1,6 @@ +Cloud Logging Handler +============================== + +.. automodule:: google.cloud.logging_v2.handlers.handlers + :members: + :show-inheritance: diff --git a/docs/handlers-container-engine.rst b/docs/handlers-container-engine.rst index 981b41dcb..0c074eb19 100644 --- a/docs/handlers-container-engine.rst +++ b/docs/handlers-container-engine.rst @@ -1,5 +1,8 @@ -Google Kubernetes Engine Log Handler -==================================== +[DEPRECATED] Kubernetes Engine Handler +================================================= + +.. deprecated:: 3.0.0 + Use :class:`StructuredLogHandler` instead. .. automodule:: google.cloud.logging_v2.handlers.container_engine :members: diff --git a/docs/handlers-structured-log.rst b/docs/handlers-structured-log.rst new file mode 100644 index 000000000..337ad591d --- /dev/null +++ b/docs/handlers-structured-log.rst @@ -0,0 +1,6 @@ +Structured Log Handler +============================== + +.. automodule:: google.cloud.logging_v2.handlers.structured_log + :members: + :show-inheritance: diff --git a/docs/handlers.rst b/docs/handlers.rst index 9089170fb..914757834 100644 --- a/docs/handlers.rst +++ b/docs/handlers.rst @@ -1,6 +1,9 @@ -Python Logging Module Handler -============================== +Handlers +---------------- +.. toctree:: + :maxdepth: 2 -.. automodule:: google.cloud.logging_v2.handlers.handlers - :members: - :show-inheritance: + handlers-cloud-logging + handlers-structured-log + handlers-app-engine + handlers-container-engine diff --git a/docs/index.rst b/docs/index.rst index 64c2dcd1e..01d8e4eee 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,17 +1,31 @@ .. include:: README.rst +Usage Guide +------------------- +.. toctree:: + :maxdepth: 2 + + usage + Documentation ------------------- .. toctree:: :maxdepth: 3 - v2 + client + logger + entries + metric + resource + sink + handlers + transport -Migration Guide ---------------- +Migration Guides +---------------- -See the guide below for instructions on migrating to the 2.x release of this library. +See the guide below for instructions on migrating between major releases of this library. .. toctree:: :maxdepth: 2 diff --git a/docs/logger.rst b/docs/logger.rst index 8aca18199..13f8e0d7e 100644 --- a/docs/logger.rst +++ b/docs/logger.rst @@ -3,4 +3,5 @@ Logger .. 
automodule:: google.cloud.logging_v2.logger
    :members:
+   :undoc-members:
    :show-inheritance:
diff --git a/docs/std-lib-integration.rst b/docs/std-lib-integration.rst
new file mode 100644
index 000000000..a485fce6d
--- /dev/null
+++ b/docs/std-lib-integration.rst
@@ -0,0 +1,146 @@
+Integration with `logging` Standard Library
+===========================================
+
+We recommend that you use :mod:`google-cloud-logging` to integrate with
+the Python :mod:`logging` standard library. This way, you can write logs using Python
+standards, and still have your logs appear in Google Cloud Logging.
+
+Automatic Configuration
+-----------------------
+
+To integrate :mod:`google-cloud-logging` with the standard :mod:`logging` module,
+call :meth:`~google.cloud.logging_v2.client.Client.setup_logging` on a :class:`~google.cloud.logging_v2.client.Client` instance.
+
+.. literalinclude:: ../samples/snippets/handler.py
+   :start-after: [START logging_handler_setup]
+   :end-before: [END logging_handler_setup]
+   :dedent: 4
+
+This :meth:`~google.cloud.logging_v2.client.Client.setup_logging` function chooses the best configuration for the environment your
+code is running on. For more information, see the `Google Cloud Logging documentation `_.
+
+Manual Handler Configuration
+-----------------------------
+
+.. _Manual Handler:
+
+Automatic configuration determines the appropriate handler for the environment.
+To specify the handler yourself, construct an instance manually and pass it in
+as an argument to :meth:`~google.cloud.logging_v2.handlers.setup_logging`:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START create_cloud_handler]
+   :end-before: [END create_cloud_handler]
+   :dedent: 4
+
+There are two supported handler classes to choose from:
+
+- :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler`:
+  - Sends logs directly to Cloud Logging over the network (:doc:`gRPC or HTTP`)
+  - Logs are transmitted according to a :ref:`Transport ` class
+  - This is the default handler on most environments, including local development
+- :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler`:
+  - Outputs logs as `structured JSON `_
+    to standard out, to be read and parsed by a GCP logging agent
+  - This is the default handler on Kubernetes Engine, Cloud Functions, and Cloud Run
+
+Standard Library
+---------------------------
+
+After you set up the Google Cloud Logging library with the Python :mod:`logging` standard library,
+you can send logs with the standard logging library as you normally would:
+
+.. literalinclude:: ../samples/snippets/handler.py
+   :start-after: [START logging_handler_usage]
+   :end-before: [END logging_handler_usage]
+   :dedent: 4
+
+For more information on using the Python :mod:`logging` standard library, see the `logging documentation `_.
+
+Logging JSON Payloads
+----------------------
+
+.. _JSON:
+
+Although the Python :mod:`logging` standard library `expects all logs to be strings `_,
+Google Cloud Logging allows `JSON payload data `_.
+
+To write JSON logs using the standard library integration, do one of the following:
+
+1. Use the `json_fields` `extra` argument:
+
+.. literalinclude:: ../samples/snippets/usage_guide.py
+   :start-after: [START logging_extra_json_fields]
+   :end-before: [END logging_extra_json_fields]
+   :dedent: 4
+
+2. Log a JSON-parsable string:
+
+.. 
literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_json_dumps] + :end-before: [END logging_json_dumps] + :dedent: 4 + + +Automatic Metadata Detection +---------------------------- + +.. _Autodetection: + +The Google Cloud Logging library attempts to detect and attach additional +`LogEntry fields `_ . +The following fields are currently supported: + +- labels +- trace* +- span_id* +- trace_sampled* +- http_request* +- source_location +- resource +- :ref:`json_fields` + +.. note:: + Fields marked with "*" require a supported Python web framework. The Google Cloud Logging + library currently supports `flask `_ and `django `_ + +Manual Metadata Using the `extra` Argument +-------------------------------------------- + +The Python :mod:`logging` standard library accepts `an "extra" argument `_ when +writing logs. You can use this argument to populate LogRecord objects with user-defined +key-value pairs. Google Cloud Logging uses the `extra` field as a way to pass in additional +metadata to populate `LogEntry fields `_. + +.. literalinclude:: ../samples/snippets/usage_guide.py + :start-after: [START logging_extras] + :end-before: [END logging_extras] + :dedent: 4 + +All of the `LogEntry fields `_ +that can be :ref:`autodetected` can also be set manually through the `extra` argument. Fields sent explicitly through the `extra` +argument override any :ref:`automatically detected` fields. + +CloudLoggingHandler Transports +------------------------------ + +.. _Transports: + +:doc:`Transport` classes define how the :class:`~google.cloud.logging_v2.handlers.handlers.CloudLoggingHandler` +transports logs over the network to Google Cloud. There are two Transport implementations +(defined as subclasses of :class:`transports.base.Transport `): + +- :class:`~google.cloud.logging_v2.handlers.transports.background_thread.BackgroundThreadTransport`: + - sends logs in batches, using a background thread + - the default Transport class +- :class:`~google.cloud.logging_v2.handlers.transports.sync.SyncTransport`: + - sends each log synchronously in a single API call + +You can set a Transport class by passing it as an argument when +:ref:`initializing CloudLoggingHandler manually.` + +You can use both transport options over :doc:`gRPC or HTTP`. + +.. note:: + :class:`~google.cloud.logging_v2.handlers.structured_log.StructuredLogHandler` + prints logs as formatted JSON to standard output, and does not use a Transport class. diff --git a/docs/stdlib-usage.rst b/docs/stdlib-usage.rst deleted file mode 100644 index 375b41ddf..000000000 --- a/docs/stdlib-usage.rst +++ /dev/null @@ -1,70 +0,0 @@ -Integration with Python logging module --------------------------------------- - - -It's possible to tie the Python :mod:`logging` module directly into Google Cloud Logging. To use it, -create a :class:`CloudLoggingHandler ` instance from your -Logging client. - -.. code-block:: python - - >>> import logging - >>> import google.cloud.logging # Don't conflict with standard logging - >>> from google.cloud.logging.handlers import CloudLoggingHandler - >>> client = google.cloud.logging.Client() - >>> handler = CloudLoggingHandler(client) - >>> cloud_logger = logging.getLogger('cloudLogger') - >>> cloud_logger.setLevel(logging.INFO) # defaults to WARN - >>> cloud_logger.addHandler(handler) - >>> cloud_logger.error('bad news') - -.. note:: - - This handler by default uses an asynchronous transport that sends log entries on a background - thread. 
However, the API call will still be made in the same process. For other transport
- options, see the transports section.
-
-All logs will go to a single custom log, which defaults to "python". The name of the Python
-logger will be included in the structured log entry under the "python_logger" field. You can
-change it by providing a name to the handler:
-
-.. code-block:: python
-
-    >>> handler = CloudLoggingHandler(client, name="mycustomlog")
-
-It is also possible to attach the handler to the root Python logger, so that for example a plain
-`logging.warn` call would be sent to Cloud Logging, as well as any other loggers created. However,
-you must avoid infinite recursion from the logging calls the client itself makes. A helper
-method :meth:`setup_logging ` is provided to configure
-this automatically:
-
-.. code-block:: python
-
-    >>> import logging
-    >>> import google.cloud.logging # Don't conflict with standard logging
-    >>> from google.cloud.logging.handlers import CloudLoggingHandler, setup_logging
-    >>> client = google.cloud.logging.Client()
-    >>> handler = CloudLoggingHandler(client)
-    >>> logging.getLogger().setLevel(logging.INFO) # defaults to WARN
-    >>> setup_logging(handler)
-    >>> logging.error('bad news')
-
-You can also exclude certain loggers:
-
-.. code-block:: python
-
-    >>> setup_logging(handler, excluded_loggers=('werkzeug',))
-
-
-
-Python logging handler transports
-==================================
-
-The Python logging handler can use different transports. The default is
-:class:`google.cloud.logging_v2.handlers.BackgroundThreadTransport`.
-
- 1. :class:`google.cloud.logging_V2.handlers.BackgroundThreadTransport` this is the default. It writes
- entries on a background :class:`python.threading.Thread`.
-
- 1. :class:`google.cloud.logging_V2.handlers.SyncTransport` this handler does a direct API call on each
- logging statement to write the entry.
diff --git a/docs/transport.rst b/docs/transport.rst
new file mode 100644
index 000000000..9f4430103
--- /dev/null
+++ b/docs/transport.rst
@@ -0,0 +1,25 @@
+Transports
+----------------
+
+These classes define how the :class:`CloudLoggingHandler `
+transports logs to Google Cloud. More information can be found in the :ref:`User Guide`.
+
+Base Transport
+~~~~~~~~~~~~~~
+.. automodule:: google.cloud.logging_v2.handlers.transports.base
+   :members:
+   :show-inheritance:
+
+Background Thread Transport
+~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread
+   :members:
+   :show-inheritance:
+
+Synchronous Transport
+~~~~~~~~~~~~~~~~~~~~~
+
+.. automodule:: google.cloud.logging_v2.handlers.transports.sync
+   :members:
+   :show-inheritance:
diff --git a/docs/transports-base.rst b/docs/transports-base.rst
deleted file mode 100644
index b28fb5ba6..000000000
--- a/docs/transports-base.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Python Logging Handler Sync Transport
-======================================
-
-.. automodule:: google.cloud.logging_v2.handlers.transports.base
-   :members:
-   :show-inheritance:
diff --git a/docs/transports-sync.rst b/docs/transports-sync.rst
deleted file mode 100644
index 32e6401cb..000000000
--- a/docs/transports-sync.rst
+++ /dev/null
@@ -1,6 +0,0 @@
-Python Logging Handler Sync Transport
-======================================
-
-.. 
automodule:: google.cloud.logging_v2.handlers.transports.sync - :members: - :show-inheritance: diff --git a/docs/transports-thread.rst b/docs/transports-thread.rst deleted file mode 100644 index 2899e6c48..000000000 --- a/docs/transports-thread.rst +++ /dev/null @@ -1,7 +0,0 @@ -Python Logging Handler Threaded Transport -========================================= - - -.. automodule:: google.cloud.logging_v2.handlers.transports.background_thread - :members: - :show-inheritance: diff --git a/docs/usage.rst b/docs/usage.rst index 1fde3d8ea..929ee9cef 100644 --- a/docs/usage.rst +++ b/docs/usage.rst @@ -1,356 +1,9 @@ Usage Guide -=========== +------------- +.. toctree:: + :maxdepth: 2 -Writing log entries -------------------- + std-lib-integration + direct-lib-usage + grpc-vs-http -To write log entries, first create a -:class:`~google.cloud.logging.logger.Logger`, passing the "log name" with -which to associate the entries: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_create] - :end-before: [END logger_create] - :dedent: 4 - -Write a simple text entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_text] - :end-before: [END logger_log_text] - :dedent: 4 - -Write a dictionary entry to the logger. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_struct] - :end-before: [END logger_log_struct] - :dedent: 4 - -Write a simple text entry and resource to the logger. - -Supported Resource values are listed at `Monitored Resource Types`_ - -.. _Monitored Resource Types: https://cloud.google.com/logging/docs/api/v2/resource-list - - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_log_resource_text] - :end-before: [END logger_log_resource_text] - :dedent: 4 - -Retrieving log entries ----------------------- - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_default] - :end-before: [END client_list_entries_default] - :dedent: 4 - -Entries returned by -:meth:`Client.list_entries ` -or -:meth:`Logger.list_entries ` -will be instances of one of the following classes: - -- :class:`~google.cloud.logging.entries.TextEntry` -- :class:`~google.cloud.logging.entries.StructEntry` -- :class:`~google.cloud.logging.entries.ProtobufEntry` - -Filter entries retrieved using the `Advanced Logs Filters`_ syntax - -.. _Advanced Logs Filters: https://cloud.google.com/logging/docs/view/advanced_filters - -Fetch entries for the default project. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_filter] - :end-before: [END client_list_entries_filter] - :dedent: 4 - -Sort entries in descending timestamp order. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_entries_order_by] - :end-before: [END client_list_entries_order_by] - :dedent: 4 - -Retrieve entries for a single logger, sorting in descending timestamp order: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_list_entries] - :end-before: [END logger_list_entries] - :dedent: 4 - -And as a practical example, retrieve all `GKE Admin Activity audit logs`_ -from the past 24 hours: - -.. _GKE Admin Activity audit logs: https://cloud.google.com/kubernetes-engine/docs/how-to/audit-logging#audit_logs_in_your_project - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logging_list_gke_audit_logs] - :end-before: [END logging_list_gke_audit_logs] - :dedent: 4 - -Delete all entries for a logger -------------------------------- - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START logger_delete] - :end-before: [END logger_delete] - :dedent: 8 - - -Manage log metrics ------------------- - -Metrics are counters of entries which match a given filter. They can be -used within Cloud Monitoring to create charts and alerts. - -List all metrics for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_metrics] - :end-before: [END client_list_metrics] - :dedent: 4 - -Create a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_create] - :end-before: [END metric_create] - :dedent: 4 - -Refresh local information about a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_reload] - :end-before: [END metric_reload] - :dedent: 4 - -Update a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_update] - :end-before: [END metric_update] - :dedent: 4 - -Delete a metric: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START metric_delete] - :end-before: [END metric_delete] - :dedent: 4 - -Export log entries using sinks ------------------------------- - -Sinks allow exporting entries which match a given filter to Cloud Storage -buckets, BigQuery datasets, or Cloud Pub/Sub topics. - -Export to Cloud Storage -~~~~~~~~~~~~~~~~~~~~~~~ - -Make sure that the storage bucket you want to export logs too has -``cloud-logs@google.com`` as the owner. See -`Setting permissions for Cloud Storage`_. - -.. _Setting permissions for Cloud Storage: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_cloud_storage - -Add ``cloud-logs@google.com`` as the owner of the bucket: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bucket_permissions] - :end-before: [END sink_bucket_permissions] - :dedent: 4 - -Create a Cloud Storage sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_storage_create] - :end-before: [END sink_storage_create] - :dedent: 4 - - -Export to BigQuery -~~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a dataset. - -See: `Setting permissions for BigQuery`_ - -.. _Setting permissions for BigQuery: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_to_bigquery - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_dataset_permissions] - :end-before: [END sink_dataset_permissions] - :dedent: 4 - -Create a BigQuery sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_bigquery_create] - :end-before: [END sink_bigquery_create] - :dedent: 4 - - -Export to Pub/Sub -~~~~~~~~~~~~~~~~~ - -To export logs to BigQuery you must log into the Cloud Platform Console -and add ``cloud-logs@google.com`` to a topic. - -See: `Setting permissions for Pub/Sub`_ - -.. _Setting permissions for Pub/Sub: https://cloud.google.com/logging/docs/export/configure_export_v2#errors_exporting_logs_to_cloud_pubsub - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_topic_permissions] - :end-before: [END sink_topic_permissions] - :dedent: 4 - -Create a Cloud Pub/Sub sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_pubsub_create] - :end-before: [END sink_pubsub_create] - :dedent: 4 - -Manage Sinks -~~~~~~~~~~~~ - -List all sinks for a project: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START client_list_sinks] - :end-before: [END client_list_sinks] - :dedent: 4 - -Refresh local information about a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_reload] - :end-before: [END sink_reload] - :dedent: 4 - -Update a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_update] - :end-before: [END sink_update] - :dedent: 4 - -Delete a sink: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START sink_delete] - :end-before: [END sink_delete] - :dedent: 4 - -Integration with Python logging module --------------------------------------- - -It's possible to tie the Python :mod:`logging` module directly into Google -Cloud Logging. There are different handler options to accomplish this. -To automatically pick the default for your current environment, use -:meth:`~google.cloud.logging.client.Client.get_default_handler`. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_default_handler] - :end-before: [END create_default_handler] - :dedent: 4 - -It is also possible to attach the handler to the root Python logger, so that -for example a plain ``logging.warn`` call would be sent to Cloud Logging, -as well as any other loggers created. A helper method -:meth:`~google.cloud.logging.client.Client.setup_logging` is provided -to configure this automatically. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging] - :end-before: [END setup_logging] - :dedent: 4 - -.. note:: - - To reduce cost and quota usage, do not enable Cloud Logging - handlers while testing locally. - -You can also exclude certain loggers: - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START setup_logging_excludes] - :end-before: [END setup_logging_excludes] - :dedent: 4 - -Cloud Logging Handler -~~~~~~~~~~~~~~~~~~~~~ - -If you prefer not to use -:meth:`~google.cloud.logging.client.Client.get_default_handler`, you can -directly create a -:class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` instance -which will write directly to the API. - -.. literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_cloud_handler] - :end-before: [END create_cloud_handler] - :dedent: 4 - -.. note:: - - This handler by default uses an asynchronous transport that sends log - entries on a background thread. However, the API call will still be made - in the same process. For other transport options, see the transports - section. - -All logs will go to a single custom log, which defaults to "python". The name -of the Python logger will be included in the structured log entry under the -"python_logger" field. You can change it by providing a name to the handler: - -.. 
literalinclude:: ../samples/snippets/usage_guide.py - :start-after: [START create_named_handler] - :end-before: [END create_named_handler] - :dedent: 4 - -Cloud Logging Handler transports -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -The :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler` -logging handler can use different transports. The default is -:class:`~google.cloud.logging.handlers.BackgroundThreadTransport`. - - 1. :class:`~google.cloud.logging.handlers.BackgroundThreadTransport` this is - the default. It writes entries on a background - :class:`python.threading.Thread`. - - 1. :class:`~google.cloud.logging.handlers.SyncTransport` this handler does a - direct API call on each logging statement to write the entry. - - -.. _Google Kubernetes Engine: https://cloud.google.com/kubernetes-engine - -fluentd logging handlers -~~~~~~~~~~~~~~~~~~~~~~~~ - -Besides :class:`~google.cloud.logging.handlers.handlers.CloudLoggingHandler`, -which writes directly to the API, two other handlers are provided. -:class:`~google.cloud.logging.handlers.app_engine.AppEngineHandler`, which is -recommended when running on the Google App Engine Flexible vanilla runtimes -(i.e. your app.yaml contains ``runtime: python``), and -:class:`~google.cloud.logging.handlers.container_engine.ContainerEngineHandler` -, which is recommended when running on `Google Kubernetes Engine`_ with the -Cloud Logging plugin enabled. - -:meth:`~google.cloud.logging.client.Client.get_default_handler` and -:meth:`~google.cloud.logging.client.Client.setup_logging` will attempt to use -the environment to automatically detect whether the code is running in -these platforms and use the appropriate handler. - -In both cases, the fluentd agent is configured to automatically parse log files -in an expected format and forward them to Cloud Logging. The handlers -provided help set the correct metadata such as log level so that logs can be -filtered accordingly. diff --git a/docs/v2.rst b/docs/v2.rst deleted file mode 100644 index 823097bd7..000000000 --- a/docs/v2.rst +++ /dev/null @@ -1,19 +0,0 @@ -v2 ----------------- -.. 
toctree:: - :maxdepth: 2 - - usage - client - logger - entries - metric - resource - sink - stdlib-usage - handlers - handlers-app-engine - handlers-container-engine - transports-sync - transports-thread - transports-base diff --git a/google/cloud/logging_v2/logger.py b/google/cloud/logging_v2/logger.py index 542e4d629..02ecb6905 100644 --- a/google/cloud/logging_v2/logger.py +++ b/google/cloud/logging_v2/logger.py @@ -158,7 +158,7 @@ def _do_log(self, client, _entry_class, payload=None, **kw): client.logging_api.write_entries([api_repr]) def log_empty(self, *, client=None, **kw): - """Log an empty message via a POST request + """Log an empty message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -173,7 +173,7 @@ def log_empty(self, *, client=None, **kw): self._do_log(client, LogEntry, **kw) def log_text(self, text, *, client=None, **kw): - """Log a text message via a POST request + """Log a text message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -189,7 +189,7 @@ def log_text(self, text, *, client=None, **kw): self._do_log(client, TextEntry, text, **kw) def log_struct(self, info, *, client=None, **kw): - """Log a structured message via a POST request + """Log a dictionary message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/write @@ -209,7 +209,7 @@ def log_struct(self, info, *, client=None, **kw): self._do_log(client, StructEntry, info, **kw) def log_proto(self, message, *, client=None, **kw): - """Log a protobuf message via a POST request + """Log a protobuf message See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list @@ -226,8 +226,7 @@ def log_proto(self, message, *, client=None, **kw): self._do_log(client, ProtobufEntry, message, **kw) def log(self, message=None, *, client=None, **kw): - """Log an arbitrary message via a POST request. - Type will be inferred based on the input message. + """Log an arbitrary message. Type will be inferred based on the input. 
See https://cloud.google.com/logging/docs/reference/v2/rest/v2/entries/list diff --git a/samples/snippets/usage_guide.py b/samples/snippets/usage_guide.py index e519c75c1..fdbbe1211 100644 --- a/samples/snippets/usage_guide.py +++ b/samples/snippets/usage_guide.py @@ -100,15 +100,65 @@ def client_list_entries(client, to_delete): # pylint: disable=unused-argument @snippet -def logger_usage(client, to_delete): +def client_setup(client2, to_delete): + """Client setup.""" + + # [START usage_client_setup] + import google.cloud.logging + + # if project not given, it will be inferred from the environment + client = google.cloud.logging.Client(project="my-project") + # [END usage_client_setup] + to_delete.append(client) + + # [START usage_http_client_setup] + http_client = google.cloud.logging.Client(_use_grpc=False) + # [END usage_http_client_setup] + to_delete.append(http_client) + + +@snippet +def logger_usage(client_true, to_delete): """Logger usage.""" - log_name = "logger_usage_%d" % (_millis()) + import google.cloud.logging # [START logger_create] - logger = client.logger(log_name) + client = google.cloud.logging.Client(project="my-project") + logger = client.logger(name="log_id") + # logger will bind to logName "projects/my_project/logs/log_id" # [END logger_create] + client = client_true + + log_id = "logger_usage_%d" % (_millis()) + # [START logger_custom_labels] + custom_labels = {"my-key": "my-value"} + label_logger = client.logger(log_id, labels=custom_labels) + # [END logger_custom_labels] + to_delete.append(label_logger) + # [START logger_custom_resource] + from google.cloud.logging_v2.resource import Resource + + resource = Resource(type="global", labels={}) + global_logger = client.logger(log_id, resource=resource) + # [END logger_custom_resource] + to_delete.append(global_logger) + + logger = client_true.logger(log_id) to_delete.append(logger) + # [START logger_log_basic] + logger.log("A simple entry") # API call + # [END logger_log_basic] + + # [START logger_log_fields] + logger.log( + "an entry with fields set", + severity="ERROR", + insert_id="0123", + labels={"my-label": "my-value"}, + ) # API call + # [END logger_log_fields] + # [START logger_log_text] logger.log_text("A simple entry") # API call # [END logger_log_text] @@ -135,6 +185,20 @@ def logger_usage(client, to_delete): ) # [END logger_log_resource_text] + # [START logger_log_batch] + batch = logger.batch() + batch.log("first log") + batch.log("second log") + batch.commit() + # [END logger_log_batch] + + # [START logger_log_batch_context] + with logger.batch() as batch: + batch.log("first log") + # do work + batch.log("last log") + # [END logger_log_batch_context] + # [START logger_list_entries] from google.cloud.logging import DESCENDING @@ -357,12 +421,10 @@ def logging_handler(client): # [START create_cloud_handler] from google.cloud.logging.handlers import CloudLoggingHandler + from google.cloud.logging_v2.handlers import setup_logging handler = CloudLoggingHandler(client) - cloud_logger = logging.getLogger("cloudLogger") - cloud_logger.setLevel(logging.INFO) - cloud_logger.addHandler(handler) - cloud_logger.error("bad news") + setup_logging(handler) # [END create_cloud_handler] # [START create_named_handler] @@ -370,6 +432,39 @@ def logging_handler(client): # [END create_named_handler] +@snippet +def logging_json(client): + # [START logging_json_dumps] + import logging + import json + + data_dict = {"hello": "world"} + logging.info(json.dumps(data_dict)) + # [END logging_json_dumps] + + # [START 
logging_extra_json_fields] + import logging + + data_dict = {"hello": "world"} + logging.info("message field", extra={"json_fields": data_dict}) + # [END logging_extra_json_fields] + + +@snippet +def using_extras(client): + import logging + + # [START logging_extras] + my_labels = {"foo": "bar"} + my_http = {"requestUrl": "localhost"} + my_trace = "01234" + + logging.info( + "hello", extra={"labels": my_labels, "http_request": my_http, "trace": my_trace} + ) + # [END logging_extras] + + @snippet def setup_logging(client): import logging From d86be6cf83c3f3b91c4fc0b2e0666b0ca1d7e248 Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 27 Jan 2022 19:58:13 +0100 Subject: [PATCH 33/34] chore(deps): update dependency google-cloud-storage to v2.1.0 (#469) --- samples/snippets/requirements.txt | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index d75e274c2..0ab529f1c 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,4 +1,5 @@ google-cloud-logging==2.7.0 google-cloud-bigquery==2.32.0 -google-cloud-storage==2.0.0 +google-cloud-storage==2.0.0; python_version == '3.6' +google-cloud-storage==2.1.0; python_version >= '3.7' google-cloud-pubsub==2.9.0 From 10727ef3c8cca7e20484e58e6afdc79e81a4d4c9 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Tue, 1 Feb 2022 13:51:04 -0800 Subject: [PATCH 34/34] chore(main): release 3.0.0 (#473) --- CHANGELOG.md | 39 +++++++++++++++++++++++++++++++++++++++ setup.py | 2 +- 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 068ad3df2..9bfce6bf1 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,45 @@ [1]: https://pypi.org/project/google-cloud-logging/#history +## [3.0.0](https://github.com/googleapis/python-logging/compare/v2.7.0...v3.0.0) (2022-01-27) + + +### ⚠ BREAKING CHANGES + +* make logging API more friendly to use (#422) +* api consistency between HTTP and Gapic layers (#375) +* support string-encoded json (#339) +* Infer default resource in logger (#315) +* support json logs (#316) +* deprecate AppEngineHandler and ContainerEngineHandler (#310) + +### Features + +* add api key support ([#472](https://github.com/googleapis/python-logging/issues/472)) ([81ca8c6](https://github.com/googleapis/python-logging/commit/81ca8c616acb988be1fbecfc2a0b1a5b39280149)) +* add json_fields extras argument for adding to jsonPayload ([#447](https://github.com/googleapis/python-logging/issues/447)) ([a760e02](https://github.com/googleapis/python-logging/commit/a760e02371a55d6262e42de9e0222fffa2c7192b)) +* avoid importing grpc when explicitly disabled ([#416](https://github.com/googleapis/python-logging/issues/416)) ([818213e](https://github.com/googleapis/python-logging/commit/818213e143d6a1941211a48e0b23069a426ac300)) +* Infer default resource in logger ([#315](https://github.com/googleapis/python-logging/issues/315)) ([c632503](https://github.com/googleapis/python-logging/commit/c63250399fcd6e1317d341e98fab11095c443e5e)) +* make logging API more friendly to use ([#422](https://github.com/googleapis/python-logging/issues/422)) ([83d9ca8](https://github.com/googleapis/python-logging/commit/83d9ca8521fe7c470bb6755a48a97496515d7abc)) +* support json logs ([#316](https://github.com/googleapis/python-logging/issues/316)) ([5267152](https://github.com/googleapis/python-logging/commit/5267152574b2ee96eb6f5c536a762f58bd2f886e)) +* 
support string-encoded json ([#339](https://github.com/googleapis/python-logging/issues/339)) ([6fa1773](https://github.com/googleapis/python-logging/commit/6fa17735fe3edb45483ec5e3abd1f53c24ffa881)) +* trace improvements ([#450](https://github.com/googleapis/python-logging/issues/450)) ([e0c5fc0](https://github.com/googleapis/python-logging/commit/e0c5fc02160ae87faf4ba5c2b62be86de6b02cf3)) + + +### Bug Fixes + +* allow reading logs from non-project paths ([#444](https://github.com/googleapis/python-logging/issues/444)) ([97e32b6](https://github.com/googleapis/python-logging/commit/97e32b67603553fe350b6327455fc9f80b8aa6ce)) +* api consistency between HTTP and Gapic layers ([#375](https://github.com/googleapis/python-logging/issues/375)) ([e1506fa](https://github.com/googleapis/python-logging/commit/e1506fa9030776353878048ce562c53bf6ccf7bf)) + + +### Miscellaneous Chores + +* deprecate AppEngineHandler and ContainerEngineHandler ([#310](https://github.com/googleapis/python-logging/issues/310)) ([e3cac88](https://github.com/googleapis/python-logging/commit/e3cac888d40bf67af11e57b74615b0c3b8e8aa3e)) + + +### Documentation + +* update usage guide for v3.0.0 ([#456](https://github.com/googleapis/python-logging/issues/456)) ([8a67b73](https://github.com/googleapis/python-logging/commit/8a67b73cdfcb9da545671be6cf59c724360b1544)) + ## [2.7.0](https://www.github.com/googleapis/python-logging/compare/v2.6.0...v2.7.0) (2021-11-02) diff --git a/setup.py b/setup.py index b31ae8f78..2cf113e34 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ name = "google-cloud-logging" description = "Stackdriver Logging API client library" -version = "2.7.0" +version = "3.0.0" # Should be one of: # 'Development Status :: 3 - Alpha' # 'Development Status :: 4 - Beta'