diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
new file mode 100644
index 0000000..30c3973
--- /dev/null
+++ b/.github/CODEOWNERS
@@ -0,0 +1,11 @@
+# Code owners file.
+# This file controls who is tagged for review for any given pull request.
+#
+# For syntax help see:
+# https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax
+
+# The @googleapis/yoshi-python team is the default owner for changes in this repo
+* @googleapis/yoshi-python
+
+# The @googleapis/python-samples-owners team is the default owner for samples changes
+/samples/ @googleapis/python-samples-owners
\ No newline at end of file
diff --git a/.github/snippet-bot.yml b/.github/snippet-bot.yml
new file mode 100644
index 0000000..e69de29
diff --git a/.kokoro/docs/common.cfg b/.kokoro/docs/common.cfg
index d68fb9e..cb42692 100644
--- a/.kokoro/docs/common.cfg
+++ b/.kokoro/docs/common.cfg
@@ -30,7 +30,7 @@ env_vars: {
env_vars: {
key: "V2_STAGING_BUCKET"
- value: "docs-staging-v2-staging"
+ value: "docs-staging-v2"
}
# It will upload the docker image after successful builds.
diff --git a/.kokoro/populate-secrets.sh b/.kokoro/populate-secrets.sh
new file mode 100755
index 0000000..f525142
--- /dev/null
+++ b/.kokoro/populate-secrets.sh
@@ -0,0 +1,43 @@
+#!/bin/bash
+# Copyright 2020 Google LLC.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -eo pipefail
+
+function now { date +"%Y-%m-%d %H:%M:%S" | tr -d '\n' ;}
+function msg { println "$*" >&2 ;}
+function println { printf '%s\n' "$(now) $*" ;}
+
+
+# Populates requested secrets set in SECRET_MANAGER_KEYS from service account:
+# kokoro-trampoline@cloud-devrel-kokoro-resources.iam.gserviceaccount.com
+SECRET_LOCATION="${KOKORO_GFILE_DIR}/secret_manager"
+msg "Creating folder on disk for secrets: ${SECRET_LOCATION}"
+mkdir -p ${SECRET_LOCATION}
+for key in $(echo ${SECRET_MANAGER_KEYS} | sed "s/,/ /g")
+do
+ msg "Retrieving secret ${key}"
+ docker run --entrypoint=gcloud \
+ --volume=${KOKORO_GFILE_DIR}:${KOKORO_GFILE_DIR} \
+ gcr.io/google.com/cloudsdktool/cloud-sdk \
+ secrets versions access latest \
+ --project cloud-devrel-kokoro-resources \
+ --secret ${key} > \
+ "${SECRET_LOCATION}/${key}"
+ if [[ $? == 0 ]]; then
+ msg "Secret written to ${SECRET_LOCATION}/${key}"
+ else
+ msg "Error retrieving secret ${key}"
+ fi
+done
diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg
index 56fda51..1fecf62 100644
--- a/.kokoro/release/common.cfg
+++ b/.kokoro/release/common.cfg
@@ -23,42 +23,18 @@ env_vars: {
value: "github/python-containeranalysis/.kokoro/release.sh"
}
-# Fetch the token needed for reporting release status to GitHub
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "yoshi-automation-github-key"
- }
- }
-}
-
-# Fetch PyPI password
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "google_cloud_pypi_password"
- }
- }
-}
-
-# Fetch magictoken to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "releasetool-magictoken"
- }
- }
+# Fetch PyPI password
+before_action {
+ fetch_keystore {
+ keystore_resource {
+ keystore_config_id: 73713
+ keyname: "google_cloud_pypi_password"
+ }
+ }
}
-# Fetch api key to use with Magic Github Proxy
-before_action {
- fetch_keystore {
- keystore_resource {
- keystore_config_id: 73713
- keyname: "magic-github-proxy-api-key"
- }
- }
-}
+# Tokens needed to report release status back to GitHub
+env_vars: {
+ key: "SECRET_MANAGER_KEYS"
+ value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.6/common.cfg b/.kokoro/samples/python3.6/common.cfg
index fc51ea6..4bb8955 100644
--- a/.kokoro/samples/python3.6/common.cfg
+++ b/.kokoro/samples/python3.6/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.6"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py36"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-containeranalysis/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.7/common.cfg b/.kokoro/samples/python3.7/common.cfg
index 5b9ee9a..b91e7fb 100644
--- a/.kokoro/samples/python3.7/common.cfg
+++ b/.kokoro/samples/python3.7/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.7"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py37"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-containeranalysis/.kokoro/test-samples.sh"
diff --git a/.kokoro/samples/python3.8/common.cfg b/.kokoro/samples/python3.8/common.cfg
index 1026b49..28f9a95 100644
--- a/.kokoro/samples/python3.8/common.cfg
+++ b/.kokoro/samples/python3.8/common.cfg
@@ -13,6 +13,12 @@ env_vars: {
value: "py-3.8"
}
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py38"
+}
+
env_vars: {
key: "TRAMPOLINE_BUILD_FILE"
value: "github/python-containeranalysis/.kokoro/test-samples.sh"
diff --git a/.kokoro/test-samples.sh b/.kokoro/test-samples.sh
index a31dcf5..8c20b95 100755
--- a/.kokoro/test-samples.sh
+++ b/.kokoro/test-samples.sh
@@ -28,6 +28,12 @@ if [[ $KOKORO_BUILD_ARTIFACTS_SUBDIR = *"periodic"* ]]; then
git checkout $LATEST_RELEASE
fi
+# Exit early if samples directory doesn't exist
+if [ ! -d "./samples" ]; then
+ echo "No tests run. `./samples` not found"
+ exit 0
+fi
+
# Disable buffering, so that the logs stream through.
export PYTHONUNBUFFERED=1
@@ -101,4 +107,4 @@ cd "$ROOT"
# Workaround for Kokoro permissions issue: delete secrets
rm testing/{test-env.sh,client-secrets.json,service-account.json}
-exit "$RTN"
\ No newline at end of file
+exit "$RTN"
diff --git a/.kokoro/trampoline.sh b/.kokoro/trampoline.sh
index e8c4251..f39236e 100755
--- a/.kokoro/trampoline.sh
+++ b/.kokoro/trampoline.sh
@@ -15,9 +15,14 @@
set -eo pipefail
-python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py" || ret_code=$?
+# Always run the cleanup script, regardless of the success of bouncing into
+# the container.
+function cleanup() {
+ chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
+ echo "cleanup";
+}
+trap cleanup EXIT
-chmod +x ${KOKORO_GFILE_DIR}/trampoline_cleanup.sh
-${KOKORO_GFILE_DIR}/trampoline_cleanup.sh || true
-
-exit ${ret_code}
+$(dirname $0)/populate-secrets.sh # Secret Manager secrets.
+python3 "${KOKORO_GFILE_DIR}/trampoline_v1.py"
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6e96bd9..6ede2b6 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,12 @@
# Changelog
+## [2.1.0](https://www.github.com/googleapis/python-containeranalysis/compare/v2.0.0...v2.1.0) (2020-11-18)
+
+
+### Features
+
+* add GetVulnerabilityOccurrencesSummary ([#42](https://www.github.com/googleapis/python-containeranalysis/issues/42)) ([7f3e8b3](https://www.github.com/googleapis/python-containeranalysis/commit/7f3e8b3357bdce56aa1cf362b60f02717365c421))
+
## [2.0.0](https://www.github.com/googleapis/python-containeranalysis/compare/v1.0.3...v2.0.0) (2020-08-12)
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index b3d1f60..039f436 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -1,44 +1,95 @@
-# Contributor Code of Conduct
+# Code of Conduct
-As contributors and maintainers of this project,
-and in the interest of fostering an open and welcoming community,
-we pledge to respect all people who contribute through reporting issues,
-posting feature requests, updating documentation,
-submitting pull requests or patches, and other activities.
+## Our Pledge
-We are committed to making participation in this project
-a harassment-free experience for everyone,
-regardless of level of experience, gender, gender identity and expression,
-sexual orientation, disability, personal appearance,
-body size, race, ethnicity, age, religion, or nationality.
+In the interest of fostering an open and welcoming environment, we as
+contributors and maintainers pledge to making participation in our project and
+our community a harassment-free experience for everyone, regardless of age, body
+size, disability, ethnicity, gender identity and expression, level of
+experience, education, socio-economic status, nationality, personal appearance,
+race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment
+include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
Examples of unacceptable behavior by participants include:
-* The use of sexualized language or imagery
-* Personal attacks
-* Trolling or insulting/derogatory comments
-* Public or private harassment
-* Publishing other's private information,
-such as physical or electronic
-addresses, without explicit permission
-* Other unethical or unprofessional conduct.
+* The use of sexualized language or imagery and unwelcome sexual attention or
+ advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic
+ address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a
+ professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable
+behavior and are expected to take appropriate and fair corrective action in
+response to any instances of unacceptable behavior.
Project maintainers have the right and responsibility to remove, edit, or reject
-comments, commits, code, wiki edits, issues, and other contributions
-that are not aligned to this Code of Conduct.
-By adopting this Code of Conduct,
-project maintainers commit themselves to fairly and consistently
-applying these principles to every aspect of managing this project.
-Project maintainers who do not follow or enforce the Code of Conduct
-may be permanently removed from the project team.
-
-This code of conduct applies both within project spaces and in public spaces
-when an individual is representing the project or its community.
-
-Instances of abusive, harassing, or otherwise unacceptable behavior
-may be reported by opening an issue
-or contacting one or more of the project maintainers.
-
-This Code of Conduct is adapted from the [Contributor Covenant](http://contributor-covenant.org), version 1.2.0,
-available at [http://contributor-covenant.org/version/1/2/0/](http://contributor-covenant.org/version/1/2/0/)
+comments, commits, code, wiki edits, issues, and other contributions that are
+not aligned to this Code of Conduct, or to ban temporarily or permanently any
+contributor for other behaviors that they deem inappropriate, threatening,
+offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces
+when an individual is representing the project or its community. Examples of
+representing a project or community include using an official project e-mail
+address, posting via an official social media account, or acting as an appointed
+representative at an online or offline event. Representation of a project may be
+further defined and clarified by project maintainers.
+
+This Code of Conduct also applies outside the project spaces when the Project
+Steward has a reasonable belief that an individual's behavior may have a
+negative impact on the project or its community.
+
+## Conflict Resolution
+
+We do not believe that all conflict is bad; healthy debate and disagreement
+often yield positive results. However, it is never okay to be disrespectful or
+to engage in behavior that violates the project’s code of conduct.
+
+If you see someone violating the code of conduct, you are encouraged to address
+the behavior directly with those involved. Many issues can be resolved quickly
+and easily, and this gives people more control over the outcome of their
+dispute. If you are unable to resolve the matter for any reason, or if the
+behavior is threatening or harassing, report it. We are dedicated to providing
+an environment where participants feel welcome and safe.
+
+
+Reports should be directed to *googleapis-stewards@google.com*, the
+Project Steward(s) for *Google Cloud Client Libraries*. It is the Project Steward’s duty to
+receive and address reported violations of the code of conduct. They will then
+work with a committee consisting of representatives from the Open Source
+Programs Office and the Google Open Source Strategy team. If for any reason you
+are uncomfortable reaching out to the Project Steward, please email
+opensource@google.com.
+
+We will investigate every complaint, but you may not receive a direct response.
+We will use our discretion in determining when and how to follow up on reported
+incidents, which may range from not taking action to permanent expulsion from
+the project and project-sponsored spaces. We will notify the accused of the
+report and provide them an opportunity to discuss it before any action is taken.
+The identity of the reporter will be omitted from the details of the report
+supplied to the accused. In potentially harmful situations, such as ongoing
+harassment or threats to anyone's safety, we may take action without notice.
+
+## Attribution
+
+This Code of Conduct is adapted from the Contributor Covenant, version 1.4,
+available at
+https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
\ No newline at end of file
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 952df1f..37164c5 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -80,25 +80,6 @@ We use `nox `__ to instrument our tests.
.. nox: https://pypi.org/project/nox/
-Note on Editable Installs / Develop Mode
-========================================
-
-- As mentioned previously, using ``setuptools`` in `develop mode`_
- or a ``pip`` `editable install`_ is not possible with this
- library. This is because this library uses `namespace packages`_.
- For context see `Issue #2316`_ and the relevant `PyPA issue`_.
-
- Since ``editable`` / ``develop`` mode can't be used, packages
- need to be installed directly. Hence your changes to the source
- tree don't get incorporated into the **already installed**
- package.
-
-.. _namespace packages: https://www.python.org/dev/peps/pep-0420/
-.. _Issue #2316: https://github.com/GoogleCloudPlatform/google-cloud-python/issues/2316
-.. _PyPA issue: https://github.com/pypa/packaging-problems/issues/12
-.. _develop mode: https://setuptools.readthedocs.io/en/latest/setuptools.html#development-mode
-.. _editable install: https://pip.pypa.io/en/stable/reference/pip_install/#editable-installs
-
*****************************************
I'm getting weird errors... Can you help?
*****************************************
diff --git a/devtools-containeranalysis-v1-py.tar.gz b/devtools-containeranalysis-v1-py.tar.gz
new file mode 100644
index 0000000..e69de29
diff --git a/docs/conf.py b/docs/conf.py
index 9fb6f92..ed14131 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -29,7 +29,7 @@
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.6.3"
+needs_sphinx = "1.5.5"
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
@@ -39,6 +39,7 @@
"sphinx.ext.autosummary",
"sphinx.ext.intersphinx",
"sphinx.ext.coverage",
+ "sphinx.ext.doctest",
"sphinx.ext.napoleon",
"sphinx.ext.todo",
"sphinx.ext.viewcode",
@@ -348,6 +349,7 @@
"google-auth": ("https://google-auth.readthedocs.io/en/stable", None),
"google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,),
"grpc": ("https://grpc.io/grpc/python/", None),
+ "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None),
}
diff --git a/docs/containeranalysis_v1/types.rst b/docs/containeranalysis_v1/types.rst
index b779ea6..c0d2aa5 100644
--- a/docs/containeranalysis_v1/types.rst
+++ b/docs/containeranalysis_v1/types.rst
@@ -3,3 +3,4 @@ Types for Google Cloud Devtools Containeranalysis v1 API
.. automodule:: google.cloud.devtools.containeranalysis_v1.types
:members:
+ :show-inheritance:
diff --git a/google/cloud/devtools/containeranalysis/__init__.py b/google/cloud/devtools/containeranalysis/__init__.py
index fb5a9de..b86315f 100644
--- a/google/cloud/devtools/containeranalysis/__init__.py
+++ b/google/cloud/devtools/containeranalysis/__init__.py
@@ -21,8 +21,16 @@
from google.cloud.devtools.containeranalysis_v1.services.container_analysis.client import (
ContainerAnalysisClient,
)
+from google.cloud.devtools.containeranalysis_v1.types.containeranalysis import (
+ GetVulnerabilityOccurrencesSummaryRequest,
+)
+from google.cloud.devtools.containeranalysis_v1.types.containeranalysis import (
+ VulnerabilityOccurrencesSummary,
+)
__all__ = (
"ContainerAnalysisAsyncClient",
"ContainerAnalysisClient",
+ "GetVulnerabilityOccurrencesSummaryRequest",
+ "VulnerabilityOccurrencesSummary",
)
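The two names newly re-exported here are the request and response types for the vulnerability summary RPC introduced later in this diff. A minimal sketch (not part of the change itself) of constructing the request type; the project ID and filter value are placeholders:

```python
# Hedged sketch: build the newly exported request type directly.
from google.cloud.devtools import containeranalysis

request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(
    parent="projects/my-project",      # placeholder project ID
    filter='kind = "VULNERABILITY"',   # illustrative filter expression
)
```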
diff --git a/google/cloud/devtools/containeranalysis_v1/__init__.py b/google/cloud/devtools/containeranalysis_v1/__init__.py
index 66e394e..c5ddb67 100644
--- a/google/cloud/devtools/containeranalysis_v1/__init__.py
+++ b/google/cloud/devtools/containeranalysis_v1/__init__.py
@@ -16,6 +16,12 @@
#
from .services.container_analysis import ContainerAnalysisClient
+from .types.containeranalysis import GetVulnerabilityOccurrencesSummaryRequest
+from .types.containeranalysis import VulnerabilityOccurrencesSummary
-__all__ = ("ContainerAnalysisClient",)
+__all__ = (
+ "GetVulnerabilityOccurrencesSummaryRequest",
+ "VulnerabilityOccurrencesSummary",
+ "ContainerAnalysisClient",
+)
diff --git a/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto b/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto
index 55b842a..f1f1e27 100644
--- a/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto
+++ b/google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto
@@ -1,4 +1,4 @@
-// Copyright 2019 Google LLC.
+// Copyright 2019 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,7 +11,6 @@
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
-//
syntax = "proto3";
@@ -19,9 +18,12 @@ package google.devtools.containeranalysis.v1;
import "google/api/annotations.proto";
import "google/api/client.proto";
+import "google/api/field_behavior.proto";
+import "google/api/resource.proto";
import "google/iam/v1/iam_policy.proto";
import "google/iam/v1/policy.proto";
import "google/protobuf/timestamp.proto";
+import "grafeas/v1/vulnerability.proto";
option csharp_namespace = "Google.Cloud.DevTools.ContainerAnalysis.V1";
option go_package = "google.golang.org/genproto/googleapis/devtools/containeranalysis/v1;containeranalysis";
@@ -105,4 +107,48 @@ service ContainerAnalysis {
};
option (google.api.method_signature) = "resource,permissions";
}
+
+ // Gets a summary of the number and severity of occurrences.
+ rpc GetVulnerabilityOccurrencesSummary(GetVulnerabilityOccurrencesSummaryRequest) returns (VulnerabilityOccurrencesSummary) {
+ option (google.api.http) = {
+ get: "/v1/{parent=projects/*}/occurrences:vulnerabilitySummary"
+ };
+ option (google.api.method_signature) = "parent,filter";
+ }
+}
+
+// Request to get a vulnerability summary for some set of occurrences.
+message GetVulnerabilityOccurrencesSummaryRequest {
+ // The name of the project to get a vulnerability summary for in the form of
+ // `projects/[PROJECT_ID]`.
+ string parent = 1 [
+ (google.api.resource_reference).type = "cloudresourcemanager.googleapis.com/Project",
+ (google.api.field_behavior) = REQUIRED
+ ];
+
+ // The filter expression.
+ string filter = 2;
+}
+
+// A summary of how many vulnerability occurrences there are per resource and
+// severity type.
+message VulnerabilityOccurrencesSummary {
+ // Per resource and severity counts of fixable and total vulnerabilities.
+ message FixableTotalByDigest {
+ // The affected resource.
+ string resource_uri = 1;
+
+ // The severity for this count. SEVERITY_UNSPECIFIED indicates total across
+ // all severities.
+ grafeas.v1.Severity severity = 2;
+
+ // The number of fixable vulnerabilities associated with this resource.
+ int64 fixable_count = 3;
+
+ // The total number of vulnerabilities associated with this resource.
+ int64 total_count = 4;
+ }
+
+ // A listing by resource of the number of fixable and total vulnerabilities.
+ repeated FixableTotalByDigest counts = 1;
}
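The new `GetVulnerabilityOccurrencesSummary` RPC is surfaced on the generated Python clients as `get_vulnerability_occurrences_summary` (see the client changes below). A hedged usage sketch, assuming Application Default Credentials are available; the project ID and filter are placeholders:

```python
# Sketch only: call the new summary RPC and walk the per-resource counts.
from google.cloud.devtools import containeranalysis_v1

client = containeranalysis_v1.ContainerAnalysisClient()
summary = client.get_vulnerability_occurrences_summary(
    parent="projects/my-project",      # placeholder project ID
    filter='kind = "VULNERABILITY"',   # illustrative filter expression
)
for count in summary.counts:           # repeated FixableTotalByDigest
    print(count.resource_uri, count.severity, count.fixable_count, count.total_count)
```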
diff --git a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py
index 8365efe..65ed9b8 100644
--- a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py
+++ b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py
@@ -28,10 +28,11 @@
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .transports.base import ContainerAnalysisTransport
+from .transports.base import ContainerAnalysisTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import ContainerAnalysisGrpcAsyncIOTransport
from .client import ContainerAnalysisClient
@@ -63,9 +64,47 @@ class ContainerAnalysisAsyncClient:
DEFAULT_ENDPOINT = ContainerAnalysisClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = ContainerAnalysisClient.DEFAULT_MTLS_ENDPOINT
+ common_billing_account_path = staticmethod(
+ ContainerAnalysisClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ ContainerAnalysisClient.parse_common_billing_account_path
+ )
+
+ common_folder_path = staticmethod(ContainerAnalysisClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ ContainerAnalysisClient.parse_common_folder_path
+ )
+
+ common_organization_path = staticmethod(
+ ContainerAnalysisClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ ContainerAnalysisClient.parse_common_organization_path
+ )
+
+ common_project_path = staticmethod(ContainerAnalysisClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ ContainerAnalysisClient.parse_common_project_path
+ )
+
+ common_location_path = staticmethod(ContainerAnalysisClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ ContainerAnalysisClient.parse_common_location_path
+ )
+
from_service_account_file = ContainerAnalysisClient.from_service_account_file
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> ContainerAnalysisTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ ContainerAnalysisTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
get_transport_class = functools.partial(
type(ContainerAnalysisClient).get_transport_class, type(ContainerAnalysisClient)
)
@@ -76,6 +115,7 @@ def __init__(
credentials: credentials.Credentials = None,
transport: Union[str, ContainerAnalysisTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the container analysis client.
@@ -91,16 +131,19 @@ def __init__(
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -108,7 +151,10 @@ def __init__(
"""
self._client = ContainerAnalysisClient(
- credentials=credentials, transport=transport, client_options=client_options,
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
)
def get_grafeas_client(self) -> grafeas_v1.GrafeasClient:
@@ -230,7 +276,8 @@ async def set_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -242,20 +289,14 @@ async def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.set_iam_policy,
default_timeout=30.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -380,7 +421,8 @@ async def get_iam_policy(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource]):
+ has_flattened_params = any([resource])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -392,20 +434,14 @@ async def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.get_iam_policy,
default_timeout=30.0,
- client_info=_client_info,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
@@ -473,7 +509,8 @@ async def test_iam_permissions(
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
- if request is not None and any([resource, permissions]):
+ has_flattened_params = any([resource, permissions])
+ if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
@@ -485,29 +522,104 @@ async def test_iam_permissions(
request = iam_policy.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest()
+ request = iam_policy.TestIamPermissionsRequest(
+ resource=resource, permissions=permissions,
+ )
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.test_iam_permissions,
+ default_timeout=30.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def get_vulnerability_occurrences_summary(
+ self,
+ request: containeranalysis.GetVulnerabilityOccurrencesSummaryRequest = None,
+ *,
+ parent: str = None,
+ filter: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> containeranalysis.VulnerabilityOccurrencesSummary:
+ r"""Gets a summary of the number and severity of
+ occurrences.
+
+ Args:
+ request (:class:`~.containeranalysis.GetVulnerabilityOccurrencesSummaryRequest`):
+ The request object. Request to get a vulnerability
+ summary for some set of occurrences.
+ parent (:class:`str`):
+ The name of the project to get a vulnerability summary
+ for in the form of ``projects/[PROJECT_ID]``.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ The filter expression.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.containeranalysis.VulnerabilityOccurrencesSummary:
+ A summary of how many vulnerability
+ occurrences there are per resource and
+ severity type.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
- if resource is not None:
- request.resource = resource
-
- if permissions:
- request.permissions.extend(permissions)
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
- self._client._transport.test_iam_permissions,
- default_timeout=30.0,
- client_info=_client_info,
+ self._client._transport.get_vulnerability_occurrences_summary,
+ default_timeout=None,
+ client_info=DEFAULT_CLIENT_INFO,
)
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
@@ -518,13 +630,13 @@ async def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-containeranalysis",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ContainerAnalysisAsyncClient",)
diff --git a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py
index 67bc8ad..c38a489 100644
--- a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py
+++ b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py
@@ -16,24 +16,27 @@
#
from collections import OrderedDict
+from distutils import util
import os
import re
-from typing import Callable, Dict, Sequence, Tuple, Type, Union
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
-import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import client_options as client_options_lib # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .transports.base import ContainerAnalysisTransport
+from .transports.base import ContainerAnalysisTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ContainerAnalysisGrpcTransport
from .transports.grpc_asyncio import ContainerAnalysisGrpcAsyncIOTransport
@@ -149,12 +152,81 @@ def from_service_account_file(cls, filename: str, *args, **kwargs):
from_service_account_json = from_service_account_file
+ @property
+ def transport(self) -> ContainerAnalysisTransport:
+ """Return the transport used by the client instance.
+
+ Returns:
+ ContainerAnalysisTransport: The transport used by the client instance.
+ """
+ return self._transport
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Return a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Return a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Return a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Return a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Return a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, ContainerAnalysisTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, ContainerAnalysisTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the container analysis client.
@@ -167,48 +239,74 @@ def __init__(
transport (Union[str, ~.ContainerAnalysisTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
+
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -232,11 +330,11 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
+ client_info=client_info,
)
def get_grafeas_client(self) -> grafeas_v1.GrafeasClient:
@@ -371,13 +469,7 @@ def set_iam_policy(
request = iam_policy.SetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.SetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.SetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -518,13 +610,7 @@ def get_iam_policy(
request = iam_policy.GetIamPolicyRequest(**request)
elif not request:
- request = iam_policy.GetIamPolicyRequest()
-
- # If we have keyword arguments corresponding to fields on the
- # request, apply these.
-
- if resource is not None:
- request.resource = resource
+ request = iam_policy.GetIamPolicyRequest(resource=resource,)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
@@ -608,25 +694,107 @@ def test_iam_permissions(
request = iam_policy.TestIamPermissionsRequest(**request)
elif not request:
- request = iam_policy.TestIamPermissionsRequest()
+ request = iam_policy.TestIamPermissionsRequest(
+ resource=resource, permissions=permissions,
+ )
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def get_vulnerability_occurrences_summary(
+ self,
+ request: containeranalysis.GetVulnerabilityOccurrencesSummaryRequest = None,
+ *,
+ parent: str = None,
+ filter: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> containeranalysis.VulnerabilityOccurrencesSummary:
+ r"""Gets a summary of the number and severity of
+ occurrences.
+
+ Args:
+ request (:class:`~.containeranalysis.GetVulnerabilityOccurrencesSummaryRequest`):
+ The request object. Request to get a vulnerability
+ summary for some set of occurrences.
+ parent (:class:`str`):
+ The name of the project to get a vulnerability summary
+ for in the form of ``projects/[PROJECT_ID]``.
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ filter (:class:`str`):
+ The filter expression.
+ This corresponds to the ``filter`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ ~.containeranalysis.VulnerabilityOccurrencesSummary:
+ A summary of how many vulnerability
+ occurrences there are per resource and
+ severity type.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, filter])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a containeranalysis.GetVulnerabilityOccurrencesSummaryRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(
+ request, containeranalysis.GetVulnerabilityOccurrencesSummaryRequest
+ ):
+ request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(
+ request
+ )
# If we have keyword arguments corresponding to fields on the
# request, apply these.
- if resource is not None:
- request.resource = resource
-
- if permissions:
- request.permissions.extend(permissions)
+ if parent is not None:
+ request.parent = parent
+ if filter is not None:
+ request.filter = filter
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
- rpc = self._transport._wrapped_methods[self._transport.test_iam_permissions]
+ rpc = self._transport._wrapped_methods[
+ self._transport.get_vulnerability_occurrences_summary
+ ]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
- gapic_v1.routing_header.to_grpc_metadata((("resource", request.resource),)),
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
)
# Send the request.
@@ -637,13 +805,13 @@ def test_iam_permissions(
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-containeranalysis",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ContainerAnalysisClient",)
diff --git a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/base.py b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/base.py
index 3e33d8b..1ff3797 100644
--- a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/base.py
+++ b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/base.py
@@ -19,24 +19,25 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
try:
- _client_info = gapic_v1.client_info.ClientInfo(
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-devtools-containeranalysis",
).version,
)
except pkg_resources.DistributionNotFound:
- _client_info = gapic_v1.client_info.ClientInfo()
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class ContainerAnalysisTransport(abc.ABC):
@@ -52,6 +53,7 @@ def __init__(
credentials_file: typing.Optional[str] = None,
scopes: typing.Optional[typing.Sequence[str]] = AUTH_SCOPES,
quota_project_id: typing.Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -69,6 +71,11 @@ def __init__(
scope (Optional[Sequence[str]]): A list of scopes.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -96,21 +103,26 @@ def __init__(
self._credentials = credentials
# Lifted into its own function so it can be stubbed out during tests.
- self._prep_wrapped_messages()
+ self._prep_wrapped_messages(client_info)
- def _prep_wrapped_messages(self):
+ def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
self.set_iam_policy: gapic_v1.method.wrap_method(
- self.set_iam_policy, default_timeout=30.0, client_info=_client_info,
+ self.set_iam_policy, default_timeout=30.0, client_info=client_info,
),
self.get_iam_policy: gapic_v1.method.wrap_method(
- self.get_iam_policy, default_timeout=30.0, client_info=_client_info,
+ self.get_iam_policy, default_timeout=30.0, client_info=client_info,
),
self.test_iam_permissions: gapic_v1.method.wrap_method(
self.test_iam_permissions,
default_timeout=30.0,
- client_info=_client_info,
+ client_info=client_info,
+ ),
+ self.get_vulnerability_occurrences_summary: gapic_v1.method.wrap_method(
+ self.get_vulnerability_occurrences_summary,
+ default_timeout=None,
+ client_info=client_info,
),
}
@@ -144,5 +156,17 @@ def test_iam_permissions(
]:
raise NotImplementedError()
+ @property
+ def get_vulnerability_occurrences_summary(
+ self,
+ ) -> typing.Callable[
+ [containeranalysis.GetVulnerabilityOccurrencesSummaryRequest],
+ typing.Union[
+ containeranalysis.VulnerabilityOccurrencesSummary,
+ typing.Awaitable[containeranalysis.VulnerabilityOccurrencesSummary],
+ ],
+ ]:
+ raise NotImplementedError()
+
__all__ = ("ContainerAnalysisTransport",)
diff --git a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc.py b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc.py
index dd846b9..ddd59e6 100644
--- a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc.py
+++ b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc.py
@@ -15,20 +15,22 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .base import ContainerAnalysisTransport
+from .base import ContainerAnalysisTransport, DEFAULT_CLIENT_INFO
class ContainerAnalysisGrpcTransport(ContainerAnalysisTransport):
@@ -70,7 +72,9 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
- quota_project_id: Optional[str] = None
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -89,16 +93,23 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
 ``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -106,6 +117,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,7 +126,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -144,6 +163,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -154,6 +191,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
@classmethod
@@ -164,7 +202,7 @@ def create_channel(
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
- **kwargs
+ **kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
@@ -198,24 +236,13 @@ def create_channel(
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
- **kwargs
+ **kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
@@ -321,5 +348,38 @@ def test_iam_permissions(
)
return self._stubs["test_iam_permissions"]
+ @property
+ def get_vulnerability_occurrences_summary(
+ self,
+ ) -> Callable[
+ [containeranalysis.GetVulnerabilityOccurrencesSummaryRequest],
+ containeranalysis.VulnerabilityOccurrencesSummary,
+ ]:
+ r"""Return a callable for the get vulnerability occurrences
+ summary method over gRPC.
+
+ Gets a summary of the number and severity of
+ occurrences.
+
+ Returns:
+ Callable[[~.GetVulnerabilityOccurrencesSummaryRequest],
+ ~.VulnerabilityOccurrencesSummary]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_vulnerability_occurrences_summary" not in self._stubs:
+ self._stubs[
+ "get_vulnerability_occurrences_summary"
+ ] = self.grpc_channel.unary_unary(
+ "/google.devtools.containeranalysis.v1.ContainerAnalysis/GetVulnerabilityOccurrencesSummary",
+ request_serializer=containeranalysis.GetVulnerabilityOccurrencesSummaryRequest.serialize,
+ response_deserializer=containeranalysis.VulnerabilityOccurrencesSummary.deserialize,
+ )
+ return self._stubs["get_vulnerability_occurrences_summary"]
+
__all__ = ("ContainerAnalysisGrpcTransport",)
diff --git a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc_asyncio.py b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc_asyncio.py
index 24bad10..af63a0d 100644
--- a/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc_asyncio.py
+++ b/google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc_asyncio.py
@@ -15,19 +15,23 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
+from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from grpc.experimental import aio # type: ignore
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
-from .base import ContainerAnalysisTransport
+from .base import ContainerAnalysisTransport, DEFAULT_CLIENT_INFO
from .grpc import ContainerAnalysisGrpcTransport
@@ -112,7 +116,9 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transport.
@@ -132,16 +138,23 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -149,6 +162,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -156,13 +171,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -182,6 +208,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # create a new channel. The provided one is ignored.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -190,6 +234,7 @@ def __init__(
credentials_file=credentials_file,
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
+ client_info=client_info,
)
self._stubs = {}
@@ -201,13 +246,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
@@ -315,5 +353,38 @@ def test_iam_permissions(
)
return self._stubs["test_iam_permissions"]
+ @property
+ def get_vulnerability_occurrences_summary(
+ self,
+ ) -> Callable[
+ [containeranalysis.GetVulnerabilityOccurrencesSummaryRequest],
+ Awaitable[containeranalysis.VulnerabilityOccurrencesSummary],
+ ]:
+ r"""Return a callable for the get vulnerability occurrences
+ summary method over gRPC.
+
+ Gets a summary of the number and severity of
+ occurrences.
+
+ Returns:
+ Callable[[~.GetVulnerabilityOccurrencesSummaryRequest],
+ Awaitable[~.VulnerabilityOccurrencesSummary]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_vulnerability_occurrences_summary" not in self._stubs:
+ self._stubs[
+ "get_vulnerability_occurrences_summary"
+ ] = self.grpc_channel.unary_unary(
+ "/google.devtools.containeranalysis.v1.ContainerAnalysis/GetVulnerabilityOccurrencesSummary",
+ request_serializer=containeranalysis.GetVulnerabilityOccurrencesSummaryRequest.serialize,
+ response_deserializer=containeranalysis.VulnerabilityOccurrencesSummary.deserialize,
+ )
+ return self._stubs["get_vulnerability_occurrences_summary"]
+
__all__ = ("ContainerAnalysisGrpcAsyncIOTransport",)
diff --git a/google/cloud/devtools/containeranalysis_v1/types/__init__.py b/google/cloud/devtools/containeranalysis_v1/types/__init__.py
index e1d07d2..4690bc4 100644
--- a/google/cloud/devtools/containeranalysis_v1/types/__init__.py
+++ b/google/cloud/devtools/containeranalysis_v1/types/__init__.py
@@ -15,5 +15,13 @@
# limitations under the License.
#
+from .containeranalysis import (
+ GetVulnerabilityOccurrencesSummaryRequest,
+ VulnerabilityOccurrencesSummary,
+)
-__all__ = ()
+
+__all__ = (
+ "GetVulnerabilityOccurrencesSummaryRequest",
+ "VulnerabilityOccurrencesSummary",
+)
diff --git a/google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py b/google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py
index 0d29807..fa22eaa 100644
--- a/google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py
+++ b/google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py
@@ -15,10 +15,75 @@
# limitations under the License.
#
+import proto # type: ignore
+
+
+from grafeas.grafeas_v1.types import vulnerability # type: ignore
+
__protobuf__ = proto.module(
- package="google.devtools.containeranalysis.v1", manifest={},
+ package="google.devtools.containeranalysis.v1",
+ manifest={
+ "GetVulnerabilityOccurrencesSummaryRequest",
+ "VulnerabilityOccurrencesSummary",
+ },
)
+class GetVulnerabilityOccurrencesSummaryRequest(proto.Message):
+ r"""Request to get a vulnerability summary for some set of
+ occurrences.
+
+ Attributes:
+ parent (str):
+ The name of the project to get a vulnerability summary for
+ in the form of ``projects/[PROJECT_ID]``.
+ filter (str):
+ The filter expression.
+ """
+
+ parent = proto.Field(proto.STRING, number=1)
+
+ filter = proto.Field(proto.STRING, number=2)
+
+
+class VulnerabilityOccurrencesSummary(proto.Message):
+ r"""A summary of how many vulnerability occurrences there are per
+ resource and severity type.
+
+ Attributes:
+ counts (Sequence[~.containeranalysis.VulnerabilityOccurrencesSummary.FixableTotalByDigest]):
+ A listing by resource of the number of
+ fixable and total vulnerabilities.
+ """
+
+ class FixableTotalByDigest(proto.Message):
+ r"""Per resource and severity counts of fixable and total
+ vulnerabilities.
+
+ Attributes:
+ resource_uri (str):
+ The affected resource.
+ severity (~.vulnerability.Severity):
+ The severity for this count. SEVERITY_UNSPECIFIED indicates
+ total across all severities.
+ fixable_count (int):
+ The number of fixable vulnerabilities
+ associated with this resource.
+ total_count (int):
+ The total number of vulnerabilities
+ associated with this resource.
+ """
+
+ resource_uri = proto.Field(proto.STRING, number=1)
+
+ severity = proto.Field(proto.ENUM, number=2, enum=vulnerability.Severity,)
+
+ fixable_count = proto.Field(proto.INT64, number=3)
+
+ total_count = proto.Field(proto.INT64, number=4)
+
+ counts = proto.RepeatedField(proto.MESSAGE, number=1, message=FixableTotalByDigest,)
+
+
__all__ = tuple(sorted(__protobuf__.manifest))
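The new proto-plus messages can also be constructed directly; a small sketch with placeholder values showing the request fields and the nested response shape defined above.

# Hedged sketch of the new message types (all values are placeholders).
from google.cloud.devtools.containeranalysis_v1.types import containeranalysis

request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(
    parent="projects/my-project",   # projects/[PROJECT_ID]
    filter='kind="VULNERABILITY"',
)

summary = containeranalysis.VulnerabilityOccurrencesSummary(
    counts=[
        containeranalysis.VulnerabilityOccurrencesSummary.FixableTotalByDigest(
            resource_uri="https://gcr.io/my-project/my-image",  # placeholder resource
            fixable_count=1,
            total_count=3,
        )
    ]
)
print(request.parent, summary.counts[0].total_count)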
diff --git a/noxfile.py b/noxfile.py
index b3ff7a2..27349c9 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -28,7 +28,7 @@
DEFAULT_PYTHON_VERSION = "3.8"
SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"]
-UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8"]
+UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]
@nox.session(python=DEFAULT_PYTHON_VERSION)
@@ -72,7 +72,9 @@ def default(session):
# Install all test dependencies, then install this package in-place.
session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
+ session.install(
+ "mock", "pytest", "pytest-cov",
+ )
session.install("-e", ".")
# Run py.test against the unit tests.
@@ -173,7 +175,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
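For context, a hedged and simplified sketch of how a parametrized nox session consumes the updated UNIT_TEST_PYTHON_VERSIONS list (now including 3.9), creating one session per interpreter; the session body is trimmed relative to the real noxfile.

# Simplified illustration only; the real unit session in noxfile.py does more.
import nox

UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9"]


@nox.session(python=UNIT_TEST_PYTHON_VERSIONS)
def unit(session):
    session.install("asyncmock", "pytest-asyncio")
    session.install(
        "mock", "pytest", "pytest-cov",
    )
    session.install("-e", ".")
    session.run("py.test", "tests/unit")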
diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py
index ba55d7c..b90eef0 100644
--- a/samples/snippets/noxfile.py
+++ b/samples/snippets/noxfile.py
@@ -39,6 +39,10 @@
# You can opt out from the test for specific Python versions.
'ignored_versions': ["2.7"],
+ # Old samples are opted out of enforcing Python type hints
+ # All new samples should feature them
+ 'enforce_type_hints': False,
+
# An envvar key for determining the project id to use. Change it
# to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a
# build specific Cloud project. You can also use your own string
@@ -132,7 +136,10 @@ def _determine_local_import_names(start_dir):
@nox.session
def lint(session):
- session.install("flake8", "flake8-import-order")
+ if not TEST_CONFIG['enforce_type_hints']:
+ session.install("flake8", "flake8-import-order")
+ else:
+ session.install("flake8", "flake8-import-order", "flake8-annotations")
local_names = _determine_local_import_names(".")
args = FLAKE8_COMMON_ARGS + [
@@ -141,8 +148,18 @@ def lint(session):
"."
]
session.run("flake8", *args)
+#
+# Black
+#
+@nox.session
+def blacken(session):
+ session.install("black")
+ python_files = [path for path in os.listdir(".") if path.endswith(".py")]
+
+ session.run("black", *python_files)
+
#
# Sample Tests
#
@@ -201,6 +218,11 @@ def _get_repo_root():
break
if Path(p / ".git").exists():
return str(p)
+ # .git is not available in repos cloned via Cloud Build
+ # setup.py is always in the library's root, so use that instead
+ # https://github.com/googleapis/synthtool/issues/792
+ if Path(p / "setup.py").exists():
+ return str(p)
p = p.parent
raise Exception("Unable to detect repository root.")
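A standalone sketch of the root-detection fallback added above: walk upward and accept either a .git directory or a setup.py file as the repository root (simplified from the noxfile helper).

# Simplified, standalone version of the repo-root lookup with the new
# setup.py fallback for checkouts (e.g. Cloud Build) that lack .git.
from pathlib import Path


def get_repo_root(start=None):
    p = Path(start or ".").resolve()
    for _ in range(10):  # bounded walk, mirroring the noxfile helper
        if (p / ".git").exists():
            return str(p)
        # .git is not available in repos cloned via Cloud Build;
        # setup.py is always in the library's root, so use that instead.
        if (p / "setup.py").exists():
            return str(p)
        p = p.parent
    raise Exception("Unable to detect repository root.")


print(get_repo_root())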
diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt
index af8750f..33b4937 100644
--- a/samples/snippets/requirements.txt
+++ b/samples/snippets/requirements.txt
@@ -1,6 +1,6 @@
-google-cloud-pubsub==1.7.0
-google-cloud-containeranalysis==1.0.3
-grafeas==1.0.0
+google-cloud-pubsub==2.1.0
+google-cloud-containeranalysis==2.0.0
+grafeas==1.0.1
pytest==5.3.0; python_version > "3.0"
pytest==4.6.6; python_version < "3.0"
flaky==3.7.0
diff --git a/samples/snippets/samples.py b/samples/snippets/samples.py
index caf0899..48d193b 100644
--- a/samples/snippets/samples.py
+++ b/samples/snippets/samples.py
@@ -265,11 +265,11 @@ def create_occurrence_subscription(subscription_id, project_id):
topic_id = 'container-analysis-occurrences-v1'
client = SubscriberClient()
- topic_name = client.topic_path(project_id, topic_id)
+ topic_name = f"projects/{project_id}/topics/{topic_id}"
subscription_name = client.subscription_path(project_id, subscription_id)
success = True
try:
- client.create_subscription(subscription_name, topic_name)
+ client.create_subscription({"name": subscription_name, "topic": topic_name})
except AlreadyExists:
# if subscription already exists, do nothing
pass
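The sample now targets google-cloud-pubsub 2.x, where SubscriberClient no longer exposes topic_path and generated methods accept a single request dict; a hedged, standalone sketch of the new call shape (ids are placeholders, ADC assumed).

# Hypothetical standalone sketch of the pubsub 2.x call shape used above.
from google.api_core.exceptions import AlreadyExists
from google.cloud.pubsub import SubscriberClient

project_id = "my-project"   # placeholder
subscription_id = "my-sub"  # placeholder
topic_id = "container-analysis-occurrences-v1"

client = SubscriberClient()
topic_name = f"projects/{project_id}/topics/{topic_id}"
subscription_name = client.subscription_path(project_id, subscription_id)
try:
    # In 2.x the request is a single dict instead of positional name/topic args.
    client.create_subscription({"name": subscription_name, "topic": topic_name})
except AlreadyExists:
    pass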
diff --git a/samples/snippets/samples_test.py b/samples/snippets/samples_test.py
index 7a1bd81..7145dd7 100644
--- a/samples/snippets/samples_test.py
+++ b/samples/snippets/samples_test.py
@@ -152,7 +152,7 @@ def test_pubsub(self):
client = SubscriberClient()
try:
topic_id = 'container-analysis-occurrences-v1'
- topic_name = client.topic_path(PROJECT_ID, topic_id)
+ topic_name = {"name": f"projects/{PROJECT_ID}/topics/{topic_id}"}
publisher = PublisherClient()
publisher.create_topic(topic_name)
except AlreadyExists:
@@ -185,7 +185,7 @@ def test_pubsub(self):
assert message_count <= receiver.msg_count
finally:
# clean up
- client.delete_subscription(subscription_name)
+ client.delete_subscription({"subscription": subscription_name})
def test_poll_discovery_occurrence(self):
# try with no discovery occurrence
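Similarly, a hedged sketch of the 2.x-style setup and cleanup calls the test now uses: PublisherClient.create_topic and SubscriberClient.delete_subscription both take request dicts (names below are placeholders, ADC assumed).

# Hypothetical sketch of pubsub 2.x setup/teardown matching the test changes.
from google.api_core.exceptions import AlreadyExists, NotFound
from google.cloud.pubsub import PublisherClient, SubscriberClient

project_id = "my-project"  # placeholder
topic_name = f"projects/{project_id}/topics/container-analysis-occurrences-v1"
subscription_name = f"projects/{project_id}/subscriptions/my-sub"  # placeholder

publisher = PublisherClient()
subscriber = SubscriberClient()
try:
    publisher.create_topic({"name": topic_name})
except AlreadyExists:
    pass
try:
    subscriber.delete_subscription({"subscription": subscription_name})
except NotFound:
    pass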
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb..21f6d2a 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are supposed to prepare these files by themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
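For reference, a hedged Python equivalent of the guarded fetch above, written with google-cloud-secret-manager (assumed installed; not part of this change) and mirroring the script's skip-if-exists behavior.

# Hypothetical Python counterpart of decrypt-secrets.sh
# (google-cloud-secret-manager is an assumed dependency).
import os
import sys

from google.cloud import secretmanager

PROJECT_ID = os.environ.get("SECRET_MANAGER_PROJECT", "cloud-devrel-kokoro-resources")
TARGETS = {
    "python-docs-samples-test-env": "testing/test-env.sh",
    "python-docs-samples-service-account": "testing/service-account.json",
    "python-docs-samples-client-secrets": "testing/client-secrets.json",
}

# Abort rather than overwrite files a sample author prepared by hand.
if any(os.path.exists(path) for path in TARGETS.values()):
    sys.exit("One or more target files exist, aborting.")

client = secretmanager.SecretManagerServiceClient()
for secret_id, path in TARGETS.items():
    name = f"projects/{PROJECT_ID}/secrets/{secret_id}/versions/latest"
    payload = client.access_secret_version(request={"name": name}).payload.data
    with open(path, "wb") as fh:
        fh.write(payload)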
diff --git a/scripts/fixup_containeranalysis_v1_keywords.py b/scripts/fixup_containeranalysis_v1_keywords.py
index 5129740..f5bb70f 100644
--- a/scripts/fixup_containeranalysis_v1_keywords.py
+++ b/scripts/fixup_containeranalysis_v1_keywords.py
@@ -1,3 +1,4 @@
+#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
@@ -41,6 +42,7 @@ class containeranalysisCallTransformer(cst.CSTTransformer):
CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata')
METHOD_TO_PARAMS: Dict[str, Tuple[str]] = {
'get_iam_policy': ('resource', 'options', ),
+ 'get_vulnerability_occurrences_summary': ('parent', 'filter', ),
'set_iam_policy': ('resource', 'policy', ),
'test_iam_permissions': ('resource', 'permissions', ),
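The new keyword entry drives the libcst transform; a hedged illustration (ADC and a real project assumed) of the request-dict call form the fixup script produces for the added method.

# Hypothetical illustration of the rewritten call shape; the fixup script folds
# positional arguments matching ('parent', 'filter') into a single request dict.
from google.cloud.devtools import containeranalysis_v1

client = containeranalysis_v1.ContainerAnalysisClient()

# Old positional style that scripts/fixup_containeranalysis_v1_keywords.py rewrites:
#   client.get_vulnerability_occurrences_summary("projects/my-project", 'kind="VULNERABILITY"')
# Rewritten, request-dict style:
client.get_vulnerability_occurrences_summary(
    request={"parent": "projects/my-project", "filter": 'kind="VULNERABILITY"'}
)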
diff --git a/setup.py b/setup.py
index c4f727a..9216a3e 100644
--- a/setup.py
+++ b/setup.py
@@ -22,10 +22,10 @@
name = "google-cloud-containeranalysis"
description = "Container Analysis API API client library"
-version = "2.0.0"
+version = "2.1.0"
release_status = "Development Status :: 5 - Production/Stable"
dependencies = [
- "google-api-core[grpc] >= 1.21.0, < 2.0.0dev",
+ "google-api-core[grpc] >= 1.22.0, < 2.0.0dev",
"grpc-google-iam-v1 >= 0.12.3, < 0.13dev",
"proto-plus >= 1.4.0",
"libcst >= 0.2.5",
diff --git a/synth.metadata b/synth.metadata
index 2386c34..cabe0c7 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,30 +3,30 @@
{
"git": {
"name": ".",
- "remote": "git@github.com:googleapis/python-containeranalysis",
- "sha": "1a93756ed78656cbbdadf164b3de3ab4a3ba6232"
+ "remote": "https://github.com/googleapis/python-containeranalysis.git",
+ "sha": "2ce48dca5bf1ca50f30266dd0b2deb5e36346c6c"
}
},
{
"git": {
"name": "googleapis",
"remote": "https://github.com/googleapis/googleapis.git",
- "sha": "43a62a87b70010d9cf9be31e99ea230a535e1b47",
- "internalRef": "326109811"
+ "sha": "07d41a7e5cade45aba6f0d277c89722b48f2c956",
+ "internalRef": "339292950"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cd886e8e4be8311aaaffcdcbae44d2cb57d06f8f"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "cd886e8e4be8311aaaffcdcbae44d2cb57d06f8f"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
}
],
@@ -40,5 +40,94 @@
"generator": "bazel"
}
}
+ ],
+ "generatedFiles": [
+ ".flake8",
+ ".github/CONTRIBUTING.md",
+ ".github/ISSUE_TEMPLATE/bug_report.md",
+ ".github/ISSUE_TEMPLATE/feature_request.md",
+ ".github/ISSUE_TEMPLATE/support_request.md",
+ ".github/PULL_REQUEST_TEMPLATE.md",
+ ".github/release-please.yml",
+ ".github/snippet-bot.yml",
+ ".gitignore",
+ ".kokoro/build.sh",
+ ".kokoro/continuous/common.cfg",
+ ".kokoro/continuous/continuous.cfg",
+ ".kokoro/docker/docs/Dockerfile",
+ ".kokoro/docker/docs/fetch_gpg_keys.sh",
+ ".kokoro/docs/common.cfg",
+ ".kokoro/docs/docs-presubmit.cfg",
+ ".kokoro/docs/docs.cfg",
+ ".kokoro/populate-secrets.sh",
+ ".kokoro/presubmit/common.cfg",
+ ".kokoro/presubmit/presubmit.cfg",
+ ".kokoro/publish-docs.sh",
+ ".kokoro/release.sh",
+ ".kokoro/release/common.cfg",
+ ".kokoro/release/release.cfg",
+ ".kokoro/samples/lint/common.cfg",
+ ".kokoro/samples/lint/continuous.cfg",
+ ".kokoro/samples/lint/periodic.cfg",
+ ".kokoro/samples/lint/presubmit.cfg",
+ ".kokoro/samples/python3.6/common.cfg",
+ ".kokoro/samples/python3.6/continuous.cfg",
+ ".kokoro/samples/python3.6/periodic.cfg",
+ ".kokoro/samples/python3.6/presubmit.cfg",
+ ".kokoro/samples/python3.7/common.cfg",
+ ".kokoro/samples/python3.7/continuous.cfg",
+ ".kokoro/samples/python3.7/periodic.cfg",
+ ".kokoro/samples/python3.7/presubmit.cfg",
+ ".kokoro/samples/python3.8/common.cfg",
+ ".kokoro/samples/python3.8/continuous.cfg",
+ ".kokoro/samples/python3.8/periodic.cfg",
+ ".kokoro/samples/python3.8/presubmit.cfg",
+ ".kokoro/test-samples.sh",
+ ".kokoro/trampoline.sh",
+ ".kokoro/trampoline_v2.sh",
+ ".trampolinerc",
+ "CODE_OF_CONDUCT.md",
+ "CONTRIBUTING.rst",
+ "LICENSE",
+ "MANIFEST.in",
+ "docs/_static/custom.css",
+ "docs/_templates/layout.html",
+ "docs/conf.py",
+ "docs/containeranalysis_v1/services.rst",
+ "docs/containeranalysis_v1/types.rst",
+ "docs/multiprocessing.rst",
+ "google/cloud/devtools/containeranalysis/__init__.py",
+ "google/cloud/devtools/containeranalysis/py.typed",
+ "google/cloud/devtools/containeranalysis_v1/__init__.py",
+ "google/cloud/devtools/containeranalysis_v1/proto/containeranalysis.proto",
+ "google/cloud/devtools/containeranalysis_v1/py.typed",
+ "google/cloud/devtools/containeranalysis_v1/services/__init__.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/__init__.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/async_client.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/client.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/__init__.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/base.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc.py",
+ "google/cloud/devtools/containeranalysis_v1/services/container_analysis/transports/grpc_asyncio.py",
+ "google/cloud/devtools/containeranalysis_v1/types/__init__.py",
+ "google/cloud/devtools/containeranalysis_v1/types/containeranalysis.py",
+ "mypy.ini",
+ "noxfile.py",
+ "renovate.json",
+ "samples/AUTHORING_GUIDE.md",
+ "samples/CONTRIBUTING.md",
+ "samples/snippets/noxfile.py",
+ "scripts/decrypt-secrets.sh",
+ "scripts/fixup_containeranalysis_v1_keywords.py",
+ "scripts/readme-gen/readme_gen.py",
+ "scripts/readme-gen/templates/README.tmpl.rst",
+ "scripts/readme-gen/templates/auth.tmpl.rst",
+ "scripts/readme-gen/templates/auth_api_key.tmpl.rst",
+ "scripts/readme-gen/templates/install_deps.tmpl.rst",
+ "scripts/readme-gen/templates/install_portaudio.tmpl.rst",
+ "setup.cfg",
+ "testing/.gitignore",
+ "tests/unit/gapic/containeranalysis_v1/__init__.py",
+ "tests/unit/gapic/containeranalysis_v1/test_container_analysis.py"
]
}
\ No newline at end of file
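The rewritten tests below exercise the GOOGLE_API_USE_CLIENT_CERTIFICATE and GOOGLE_API_USE_MTLS_ENDPOINT switches; a hedged sketch of the caller-side opt-in they model (the PEM paths are placeholders that must exist).

# Hypothetical caller-side mTLS opt-in corresponding to the tests below.
import os

from google.api_core.client_options import ClientOptions
from google.cloud.devtools import containeranalysis_v1


def client_cert_source():
    # Placeholder: return (cert_bytes, key_bytes) in PEM format.
    with open("client-cert.pem", "rb") as cert, open("client-key.pem", "rb") as key:
        return cert.read(), key.read()


os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"

client = containeranalysis_v1.ContainerAnalysisClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)
print(client.transport._host)  # expected: containeranalysis.mtls.googleapis.com:443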
diff --git a/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py b/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py
index 8edd78f..a8f1156 100644
--- a/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py
+++ b/tests/unit/gapic/containeranalysis_v1/test_container_analysis.py
@@ -27,6 +27,7 @@
from google import auth
from google.api_core import client_options
from google.api_core import exceptions
+from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.auth import credentials
@@ -40,6 +41,7 @@
from google.cloud.devtools.containeranalysis_v1.services.container_analysis import (
transports,
)
+from google.cloud.devtools.containeranalysis_v1.types import containeranalysis
from google.iam.v1 import iam_policy_pb2 as iam_policy # type: ignore
from google.iam.v1 import options_pb2 as options # type: ignore
from google.iam.v1 import policy_pb2 as policy # type: ignore
@@ -102,12 +104,12 @@ def test_container_analysis_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "containeranalysis.googleapis.com:443"
+ assert client.transport._host == "containeranalysis.googleapis.com:443"
def test_container_analysis_client_get_transport_class():
@@ -163,14 +165,14 @@ def test_container_analysis_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -179,14 +181,14 @@ def test_container_analysis_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -195,90 +197,185 @@ def test_container_analysis_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ ContainerAnalysisClient,
+ transports.ContainerAnalysisGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ ContainerAnalysisAsyncClient,
+ transports.ContainerAnalysisGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ ContainerAnalysisClient,
+ transports.ContainerAnalysisGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ ContainerAnalysisAsyncClient,
+ transports.ContainerAnalysisGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ ContainerAnalysisClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ContainerAnalysisClient),
+)
+@mock.patch.object(
+ ContainerAnalysisAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(ContainerAnalysisAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_container_analysis_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -305,9 +402,9 @@ def test_container_analysis_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -335,9 +432,9 @@ def test_container_analysis_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -354,9 +451,9 @@ def test_container_analysis_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -372,7 +469,7 @@ def test_set_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -385,6 +482,7 @@ def test_set_iam_policy(
assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -397,19 +495,19 @@ def test_set_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_set_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.SetIamPolicyRequest
+):
client = ContainerAnalysisAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.SetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -421,7 +519,7 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.SetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -431,6 +529,11 @@ async def test_set_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_set_iam_policy_async_from_dict():
+ await test_set_iam_policy_async(request_type=dict)
+
+
def test_set_iam_policy_field_headers():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
@@ -440,7 +543,7 @@ def test_set_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.set_iam_policy(request)
@@ -467,9 +570,7 @@ async def test_set_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.set_iam_policy(request)
@@ -484,10 +585,10 @@ async def test_set_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_set_iam_policy_from_dict():
+def test_set_iam_policy_from_dict_foreign():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -504,7 +605,7 @@ def test_set_iam_policy_flattened():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.set_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -538,9 +639,7 @@ async def test_set_iam_policy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.set_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.set_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -583,7 +682,7 @@ def test_get_iam_policy(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy(version=774, etag=b"etag_blob",)
@@ -596,6 +695,7 @@ def test_get_iam_policy(
assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, policy.Policy)
assert response.version == 774
@@ -608,19 +708,19 @@ def test_get_iam_policy_from_dict():
@pytest.mark.asyncio
-async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
+async def test_get_iam_policy_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.GetIamPolicyRequest
+):
client = ContainerAnalysisAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.GetIamPolicyRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
policy.Policy(version=774, etag=b"etag_blob",)
@@ -632,7 +732,7 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.GetIamPolicyRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, policy.Policy)
@@ -642,6 +742,11 @@ async def test_get_iam_policy_async(transport: str = "grpc_asyncio"):
assert response.etag == b"etag_blob"
+@pytest.mark.asyncio
+async def test_get_iam_policy_async_from_dict():
+ await test_get_iam_policy_async(request_type=dict)
+
+
def test_get_iam_policy_field_headers():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
@@ -651,7 +756,7 @@ def test_get_iam_policy_field_headers():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = policy.Policy()
client.get_iam_policy(request)
@@ -678,9 +783,7 @@ async def test_get_iam_policy_field_headers_async():
request.resource = "resource/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(policy.Policy())
await client.get_iam_policy(request)
@@ -695,10 +798,10 @@ async def test_get_iam_policy_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_get_iam_policy_from_dict():
+def test_get_iam_policy_from_dict_foreign():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -715,7 +818,7 @@ def test_get_iam_policy_flattened():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_iam_policy), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -749,9 +852,7 @@ async def test_get_iam_policy_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_iam_policy), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_iam_policy), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = policy.Policy()
@@ -795,7 +896,7 @@ def test_test_iam_permissions(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse(
@@ -811,6 +912,7 @@ def test_test_iam_permissions(
assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
assert response.permissions == ["permissions_value"]
@@ -821,18 +923,20 @@ def test_test_iam_permissions_from_dict():
@pytest.mark.asyncio
-async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
+async def test_test_iam_permissions_async(
+ transport: str = "grpc_asyncio", request_type=iam_policy.TestIamPermissionsRequest
+):
client = ContainerAnalysisAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = iam_policy.TestIamPermissionsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -845,7 +949,7 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == iam_policy.TestIamPermissionsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, iam_policy.TestIamPermissionsResponse)
@@ -853,6 +957,11 @@ async def test_test_iam_permissions_async(transport: str = "grpc_asyncio"):
assert response.permissions == ["permissions_value"]
+@pytest.mark.asyncio
+async def test_test_iam_permissions_async_from_dict():
+ await test_test_iam_permissions_async(request_type=dict)
+
+
def test_test_iam_permissions_field_headers():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
@@ -863,7 +972,7 @@ def test_test_iam_permissions_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -892,7 +1001,7 @@ async def test_test_iam_permissions_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
iam_policy.TestIamPermissionsResponse()
@@ -910,11 +1019,11 @@ async def test_test_iam_permissions_field_headers_async():
assert ("x-goog-request-params", "resource=resource/value",) in kw["metadata"]
-def test_test_iam_permissions_from_dict():
+def test_test_iam_permissions_from_dict_foreign():
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -933,7 +1042,7 @@ def test_test_iam_permissions_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -975,7 +1084,7 @@ async def test_test_iam_permissions_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.test_iam_permissions), "__call__"
+ type(client.transport.test_iam_permissions), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = iam_policy.TestIamPermissionsResponse()
@@ -1015,6 +1124,225 @@ async def test_test_iam_permissions_flattened_error_async():
)
+def test_get_vulnerability_occurrences_summary(
+ transport: str = "grpc",
+ request_type=containeranalysis.GetVulnerabilityOccurrencesSummaryRequest,
+):
+ client = ContainerAnalysisClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = containeranalysis.VulnerabilityOccurrencesSummary()
+
+ response = client.get_vulnerability_occurrences_summary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == containeranalysis.GetVulnerabilityOccurrencesSummaryRequest()
+
+ # Establish that the response is the type that we expect.
+
+ assert isinstance(response, containeranalysis.VulnerabilityOccurrencesSummary)
+
+
+def test_get_vulnerability_occurrences_summary_from_dict():
+ test_get_vulnerability_occurrences_summary(request_type=dict)
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_occurrences_summary_async(
+ transport: str = "grpc_asyncio",
+ request_type=containeranalysis.GetVulnerabilityOccurrencesSummaryRequest,
+):
+ client = ContainerAnalysisAsyncClient(
+ credentials=credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ containeranalysis.VulnerabilityOccurrencesSummary()
+ )
+
+ response = await client.get_vulnerability_occurrences_summary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0] == containeranalysis.GetVulnerabilityOccurrencesSummaryRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, containeranalysis.VulnerabilityOccurrencesSummary)
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_occurrences_summary_async_from_dict():
+ await test_get_vulnerability_occurrences_summary_async(request_type=dict)
+
+
+def test_get_vulnerability_occurrences_summary_field_headers():
+ client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+ call.return_value = containeranalysis.VulnerabilityOccurrencesSummary()
+
+ client.get_vulnerability_occurrences_summary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_occurrences_summary_field_headers_async():
+ client = ContainerAnalysisAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = containeranalysis.GetVulnerabilityOccurrencesSummaryRequest()
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ containeranalysis.VulnerabilityOccurrencesSummary()
+ )
+
+ await client.get_vulnerability_occurrences_summary(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_get_vulnerability_occurrences_summary_flattened():
+ client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = containeranalysis.VulnerabilityOccurrencesSummary()
+
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_vulnerability_occurrences_summary(
+ parent="parent_value", filter="filter_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+
+def test_get_vulnerability_occurrences_summary_flattened_error():
+ client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_vulnerability_occurrences_summary(
+ containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(),
+ parent="parent_value",
+ filter="filter_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_occurrences_summary_flattened_async():
+ client = ContainerAnalysisAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.get_vulnerability_occurrences_summary), "__call__"
+ ) as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+            containeranalysis.VulnerabilityOccurrencesSummary()
+        )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_vulnerability_occurrences_summary(
+ parent="parent_value", filter="filter_value",
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+
+ assert args[0].parent == "parent_value"
+
+ assert args[0].filter == "filter_value"
+
+
+@pytest.mark.asyncio
+async def test_get_vulnerability_occurrences_summary_flattened_error_async():
+ client = ContainerAnalysisAsyncClient(
+ credentials=credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_vulnerability_occurrences_summary(
+ containeranalysis.GetVulnerabilityOccurrencesSummaryRequest(),
+ parent="parent_value",
+ filter="filter_value",
+ )
+
+
def test_credentials_transport_error():
# It is an error to provide credentials and a transport instance.
transport = transports.ContainerAnalysisGrpcTransport(
@@ -1051,7 +1379,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = ContainerAnalysisClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -1069,10 +1397,25 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ContainerAnalysisGrpcTransport,
+ transports.ContainerAnalysisGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = ContainerAnalysisClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.ContainerAnalysisGrpcTransport,)
+ assert isinstance(client.transport, transports.ContainerAnalysisGrpcTransport,)
def test_container_analysis_base_transport_error():
@@ -1100,6 +1443,7 @@ def test_container_analysis_base_transport():
"set_iam_policy",
"get_iam_policy",
"test_iam_permissions",
+ "get_vulnerability_occurrences_summary",
)
for method in methods:
with pytest.raises(NotImplementedError):
@@ -1125,6 +1469,17 @@ def test_container_analysis_base_transport_with_credentials_file():
)
+def test_container_analysis_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.devtools.containeranalysis_v1.services.container_analysis.transports.ContainerAnalysisTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.ContainerAnalysisTransport()
+ adc.assert_called_once()
+
+
def test_container_analysis_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -1157,7 +1512,7 @@ def test_container_analysis_host_no_port():
api_endpoint="containeranalysis.googleapis.com"
),
)
- assert client._transport._host == "containeranalysis.googleapis.com:443"
+ assert client.transport._host == "containeranalysis.googleapis.com:443"
def test_container_analysis_host_with_port():
@@ -1167,182 +1522,238 @@ def test_container_analysis_host_with_port():
api_endpoint="containeranalysis.googleapis.com:8000"
),
)
- assert client._transport._host == "containeranalysis.googleapis.com:8000"
+ assert client.transport._host == "containeranalysis.googleapis.com:8000"
def test_container_analysis_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+    # Check that the channel is used if provided.
transport = transports.ContainerAnalysisGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+    assert transport._ssl_channel_credentials is None
def test_container_analysis_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+    # Check that the channel is used if provided.
transport = transports.ContainerAnalysisGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+    assert transport._ssl_channel_credentials is None
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_container_analysis_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ContainerAnalysisGrpcTransport,
+ transports.ContainerAnalysisGrpcAsyncIOTransport,
+ ],
+)
+def test_container_analysis_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
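+    # Check that if channel is None, but api_mtls_endpoint and client_cert_source
+    # are provided, then an mTLS channel will be created.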
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
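+            # The expected cert/key bytes match what client_cert_source_callback returns.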
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
+
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.ContainerAnalysisGrpcTransport,
+ transports.ContainerAnalysisGrpcAsyncIOTransport,
+ ],
+)
+def test_container_analysis_transport_channel_mtls_with_adc(transport_class):
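+    # Check that if channel and client_cert_source are None, but api_mtls_endpoint
+    # is provided, then an mTLS channel will be created with SSL ADC.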
mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
+ with mock.patch.multiple(
+ "google.auth.transport.grpc.SslCredentials",
+ __init__=mock.Mock(return_value=None),
+ ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+ ):
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
- transport = transports.ContainerAnalysisGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+
+def test_common_billing_account_path():
+ billing_account = "squid"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
)
- assert transport.grpc_channel == mock_grpc_channel
+ actual = ContainerAnalysisClient.common_billing_account_path(billing_account)
+ assert expected == actual
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_container_analysis_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = ContainerAnalysisClient.common_billing_account_path(**expected)
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
+ # Check that the path construction is reversible.
+ actual = ContainerAnalysisClient.parse_common_billing_account_path(path)
+ assert expected == actual
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
- transport = transports.ContainerAnalysisGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+def test_common_folder_path():
+ folder = "whelk"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = ContainerAnalysisClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = ContainerAnalysisClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ContainerAnalysisClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = ContainerAnalysisClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = ContainerAnalysisClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ContainerAnalysisClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = ContainerAnalysisClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = ContainerAnalysisClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = ContainerAnalysisClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
)
- assert transport.grpc_channel == mock_grpc_channel
+ actual = ContainerAnalysisClient.common_location_path(project, location)
+ assert expected == actual
-@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_container_analysis_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = ContainerAnalysisClient.common_location_path(**expected)
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.ContainerAnalysisGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ # Check that the path construction is reversible.
+ actual = ContainerAnalysisClient.parse_common_location_path(path)
+ assert expected == actual
-@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
-)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_container_analysis_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
+def test_client_withDEFAULT_CLIENT_INFO():
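+    # Check that the supplied client_info is passed through to the transport's
+    # _prep_wrapped_messages for both the client and the transport constructors.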
+ client_info = gapic_v1.client_info.ClientInfo()
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.ContainerAnalysisGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
+ with mock.patch.object(
+ transports.ContainerAnalysisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ client = ContainerAnalysisClient(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
+ prep.assert_called_once_with(client_info)
+
+ with mock.patch.object(
+ transports.ContainerAnalysisTransport, "_prep_wrapped_messages"
+ ) as prep:
+ transport_class = ContainerAnalysisClient.get_transport_class()
+ transport = transport_class(
+ credentials=credentials.AnonymousCredentials(), client_info=client_info,
)
- assert transport.grpc_channel == mock_grpc_channel
+ prep.assert_called_once_with(client_info)