From 53def25906fb0e312ce4be998538cb5c07cf13d9 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 17 Jun 2021 10:52:40 +0000
Subject: [PATCH 1/9] chore: new owl bot post processor docker image (#57)
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce
---
.github/.OwlBot.lock.yaml | 2 +-
docs/conf.py | 12 ++++++------
2 files changed, 7 insertions(+), 7 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 4ef4414..ea06d39 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:c66ba3c8d7bc8566f47df841f98cd0097b28fff0b1864c86f5817f4c8c3e8600
\ No newline at end of file
+ digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce
diff --git a/docs/conf.py b/docs/conf.py
index 8c7048e..f4b306b 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -80,9 +80,9 @@
master_doc = "index"
# General information about the project.
-project = u"google-cloud-video-transcoder"
-copyright = u"2019, Google"
-author = u"Google APIs"
+project = "google-cloud-video-transcoder"
+copyright = "2019, Google"
+author = "Google APIs"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
@@ -281,7 +281,7 @@
(
master_doc,
"google-cloud-video-transcoder.tex",
- u"google-cloud-video-transcoder Documentation",
+ "google-cloud-video-transcoder Documentation",
author,
"manual",
)
@@ -316,7 +316,7 @@
(
master_doc,
"google-cloud-video-transcoder",
- u"google-cloud-video-transcoder Documentation",
+ "google-cloud-video-transcoder Documentation",
[author],
1,
)
@@ -335,7 +335,7 @@
(
master_doc,
"google-cloud-video-transcoder",
- u"google-cloud-video-transcoder Documentation",
+ "google-cloud-video-transcoder Documentation",
author,
"google-cloud-video-transcoder",
"google-cloud-video-transcoder Library",
From 1659ce88ef94139a271be9719a4adaf4e3a600c0 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Sat, 19 Jun 2021 01:38:17 +0000
Subject: [PATCH 2/9] docs: omit mention of Python 2.7 in 'CONTRIBUTING.rst'
(#1127) (#58)
Closes #1126
Source-Link: https://github.com/googleapis/synthtool/commit/b91f129527853d5b756146a0b5044481fb4e09a8
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd
---
.github/.OwlBot.lock.yaml | 2 +-
CONTRIBUTING.rst | 7 ++-----
2 files changed, 3 insertions(+), 6 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index ea06d39..cc49c6a 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:58c7342b0bccf85028100adaa3d856cb4a871c22ca9c01960d996e66c40548ce
+ digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c1018f7..07970ca 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -69,7 +69,6 @@ We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
- $ nox -s unit-2.7
$ nox -s unit-3.8
$ ...
@@ -144,7 +143,6 @@ Running System Tests
# Run all system tests
$ nox -s system-3.8
- $ nox -s system-2.7
# Run a single system test
$ nox -s system-3.8 -- -k
@@ -152,9 +150,8 @@ Running System Tests
.. note::
- System tests are only configured to run under Python 2.7 and
- Python 3.8. For expediency, we do not run them in older versions
- of Python 3.
+ System tests are only configured to run under Python 3.8.
+ For expediency, we do not run them in older versions of Python 3.
This alone will not run the tests. You'll need to change some local
auth settings and change some configuration in your project to
From cadd38b8441e14af3972570010dadc575fbd3963 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Sun, 20 Jun 2021 01:00:34 +0000
Subject: [PATCH 3/9] chore: update precommit hook pre-commit/pre-commit-hooks
to v4 (#1083) (#60)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
[](https://renovatebot.com)
This PR contains the following updates:
| Package | Type | Update | Change |
|---|---|---|---|
| [pre-commit/pre-commit-hooks](https://togithub.com/pre-commit/pre-commit-hooks) | repository | major | `v3.4.0` -> `v4.0.1` |
---
### Release Notes
pre-commit/pre-commit-hooks
### [`v4.0.1`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.1)
[Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v4.0.0...v4.0.1)
##### Fixes
- `check-shebang-scripts-are-executable` fix entry point.
- [#602](https://togithub.com/pre-commit/pre-commit-hooks/issues/602) issue by [@Person-93](https://togithub.com/Person-93).
- [#603](https://togithub.com/pre-commit/pre-commit-hooks/issues/603) PR by [@scop](https://togithub.com/scop).
### [`v4.0.0`](https://togithub.com/pre-commit/pre-commit-hooks/releases/v4.0.0)
[Compare Source](https://togithub.com/pre-commit/pre-commit-hooks/compare/v3.4.0...v4.0.0)
##### Features
- `check-json`: report duplicate keys.
- [#558](https://togithub.com/pre-commit/pre-commit-hooks/issues/558) PR by [@AdityaKhursale](https://togithub.com/AdityaKhursale).
- [#554](https://togithub.com/pre-commit/pre-commit-hooks/issues/554) issue by [@adamchainz](https://togithub.com/adamchainz).
- `no-commit-to-branch`: add `main` to default blocked branches.
- [#565](https://togithub.com/pre-commit/pre-commit-hooks/issues/565) PR by [@ndevenish](https://togithub.com/ndevenish).
- `check-case-conflict`: check conflicts in directory names as well.
- [#575](https://togithub.com/pre-commit/pre-commit-hooks/issues/575) PR by [@slsyy](https://togithub.com/slsyy).
- [#70](https://togithub.com/pre-commit/pre-commit-hooks/issues/70) issue by [@andyjack](https://togithub.com/andyjack).
- `check-vcs-permalinks`: forbid other branch names.
- [#582](https://togithub.com/pre-commit/pre-commit-hooks/issues/582) PR by [@jack1142](https://togithub.com/jack1142).
- [#581](https://togithub.com/pre-commit/pre-commit-hooks/issues/581) issue by [@jack1142](https://togithub.com/jack1142).
- `check-shebang-scripts-are-executable`: new hook which ensures shebang'd scripts are executable.
- [#545](https://togithub.com/pre-commit/pre-commit-hooks/issues/545) PR by [@scop](https://togithub.com/scop).
##### Fixes
- `check-executables-have-shebangs`: Short circuit shebang lookup on windows.
- [#544](https://togithub.com/pre-commit/pre-commit-hooks/issues/544) PR by [@scop](https://togithub.com/scop).
- `requirements-txt-fixer`: Fix comments which have indentation
- [#549](https://togithub.com/pre-commit/pre-commit-hooks/issues/549) PR by [@greshilov](https://togithub.com/greshilov).
- [#548](https://togithub.com/pre-commit/pre-commit-hooks/issues/548) issue by [@greshilov](https://togithub.com/greshilov).
- `pretty-format-json`: write to stdout using UTF-8 encoding.
- [#571](https://togithub.com/pre-commit/pre-commit-hooks/issues/571) PR by [@jack1142](https://togithub.com/jack1142).
- [#570](https://togithub.com/pre-commit/pre-commit-hooks/issues/570) issue by [@jack1142](https://togithub.com/jack1142).
- Use more inclusive language.
- [#599](https://togithub.com/pre-commit/pre-commit-hooks/issues/599) PR by [@asottile](https://togithub.com/asottile).
##### Breaking changes
- Remove deprecated hooks: `flake8`, `pyflakes`, `autopep8-wrapper`.
- [#597](https://togithub.com/pre-commit/pre-commit-hooks/issues/597) PR by [@asottile](https://togithub.com/asottile).
---
### Configuration
📅 **Schedule**: At any time (no schedule defined).
🚦 **Automerge**: Disabled by config. Please merge this manually once you are satisfied.
♻️ **Rebasing**: Renovate will not automatically rebase this PR, because other commits have been found.
🔕 **Ignore**: Close this PR and you won't be reminded about this update again.
---
- [ ] If you want to rebase/retry this PR, check this box.
---
This PR has been generated by [WhiteSource Renovate](https://renovate.whitesourcesoftware.com). View repository job log [here](https://app.renovatebot.com/dashboard#github/googleapis/synthtool).
Source-Link: https://github.com/googleapis/synthtool/commit/333fd90856f1454380514bc59fc0936cdaf1c202
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
---
.github/.OwlBot.lock.yaml | 2 +-
.pre-commit-config.yaml | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index cc49c6a..9602d54 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:b6169fc6a5207b11800a7c002d0c5c2bc6d82697185ca12e666f44031468cfcd
+ digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 4f00c7c..62eb5a7 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -16,7 +16,7 @@
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v3.4.0
+ rev: v4.0.1
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
From f1285fc9a0082ecdce9a1c67ac24ca3fe5a72839 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Mon, 21 Jun 2021 15:55:11 -0600
Subject: [PATCH 4/9] chore: add kokoro 3.9 config templates (#1128) (#61)
fixes #907
Source-Link: https://github.com/googleapis/synthtool/commit/b0eb8a8b30b46a3c98d23c23107acb748c6601a1
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6
Co-authored-by: Owl Bot
---
.github/.OwlBot.lock.yaml | 2 +-
.kokoro/samples/python3.9/common.cfg | 40 +++++++++++++++++++++
.kokoro/samples/python3.9/continuous.cfg | 6 ++++
.kokoro/samples/python3.9/periodic-head.cfg | 11 ++++++
.kokoro/samples/python3.9/periodic.cfg | 6 ++++
.kokoro/samples/python3.9/presubmit.cfg | 6 ++++
6 files changed, 70 insertions(+), 1 deletion(-)
create mode 100644 .kokoro/samples/python3.9/common.cfg
create mode 100644 .kokoro/samples/python3.9/continuous.cfg
create mode 100644 .kokoro/samples/python3.9/periodic-head.cfg
create mode 100644 .kokoro/samples/python3.9/periodic.cfg
create mode 100644 .kokoro/samples/python3.9/presubmit.cfg
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 9602d54..0954585 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:b8c131c558606d3cea6e18f8e87befbd448c1482319b0db3c5d5388fa6ea72e3
+ digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6
diff --git a/.kokoro/samples/python3.9/common.cfg b/.kokoro/samples/python3.9/common.cfg
new file mode 100644
index 0000000..caac37c
--- /dev/null
+++ b/.kokoro/samples/python3.9/common.cfg
@@ -0,0 +1,40 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+# Build logs will be here
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ }
+}
+
+# Specify which tests to run
+env_vars: {
+ key: "RUN_TESTS_SESSION"
+ value: "py-3.9"
+}
+
+# Declare build specific Cloud project.
+env_vars: {
+ key: "BUILD_SPECIFIC_GCLOUD_PROJECT"
+ value: "python-docs-samples-tests-py39"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-video-transcoder/.kokoro/test-samples.sh"
+}
+
+# Configure the docker image for kokoro-trampoline.
+env_vars: {
+ key: "TRAMPOLINE_IMAGE"
+ value: "gcr.io/cloud-devrel-kokoro-resources/python-samples-testing-docker"
+}
+
+# Download secrets for samples
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/python-docs-samples"
+
+# Download trampoline resources.
+gfile_resources: "/bigstore/cloud-devrel-kokoro-resources/trampoline"
+
+# Use the trampoline script to run in docker.
+build_file: "python-video-transcoder/.kokoro/trampoline.sh"
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/continuous.cfg b/.kokoro/samples/python3.9/continuous.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/continuous.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/periodic-head.cfg b/.kokoro/samples/python3.9/periodic-head.cfg
new file mode 100644
index 0000000..f9cfcd3
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic-head.cfg
@@ -0,0 +1,11 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
+
+env_vars: {
+ key: "TRAMPOLINE_BUILD_FILE"
+ value: "github/python-pubsub/.kokoro/test-samples-against-head.sh"
+}
diff --git a/.kokoro/samples/python3.9/periodic.cfg b/.kokoro/samples/python3.9/periodic.cfg
new file mode 100644
index 0000000..50fec96
--- /dev/null
+++ b/.kokoro/samples/python3.9/periodic.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "False"
+}
\ No newline at end of file
diff --git a/.kokoro/samples/python3.9/presubmit.cfg b/.kokoro/samples/python3.9/presubmit.cfg
new file mode 100644
index 0000000..a1c8d97
--- /dev/null
+++ b/.kokoro/samples/python3.9/presubmit.cfg
@@ -0,0 +1,6 @@
+# Format: //devtools/kokoro/config/proto/build.proto
+
+env_vars: {
+ key: "INSTALL_LIBRARY_FROM_SOURCE"
+ value: "True"
+}
\ No newline at end of file
From d43c40e9ab80c42afd25efa1c2980d23dbc50ce2 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Wed, 23 Jun 2021 20:25:01 +0000
Subject: [PATCH 5/9] feat: add always_use_jwt_access (#62)
... chore: update gapic-generator-ruby to the latest commit chore: release gapic-generator-typescript 1.5.0
Committer: @miraleung
PiperOrigin-RevId: 380641501
Source-Link: https://github.com/googleapis/googleapis/commit/076f7e9f0b258bdb54338895d7251b202e8f0de3
Source-Link: https://github.com/googleapis/googleapis-gen/commit/27e4c88b4048e5f56508d4e1aa417d60a3380892
---
.coveragerc | 1 -
.../transcoder_service/transports/base.py | 40 +++----
.../transcoder_service/transports/grpc.py | 7 +-
.../transports/grpc_asyncio.py | 7 +-
setup.py | 2 +-
testing/constraints-3.6.txt | 2 +-
.../test_transcoder_service.py | 104 +++---------------
7 files changed, 39 insertions(+), 124 deletions(-)
diff --git a/.coveragerc b/.coveragerc
index 8b12d80..6926d19 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -2,7 +2,6 @@
branch = True
[report]
-fail_under = 100
show_missing = True
omit =
google/cloud/video/transcoder/__init__.py
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
index bb86ccd..bfc94b5 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
@@ -24,6 +24,7 @@
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
from google.cloud.video.transcoder_v1beta1.types import resources
from google.cloud.video.transcoder_v1beta1.types import services
@@ -47,8 +48,6 @@
except pkg_resources.DistributionNotFound: # pragma: NO COVER
_GOOGLE_AUTH_VERSION = None
-_API_CORE_VERSION = google.api_core.__version__
-
class TranscoderServiceTransport(abc.ABC):
"""Abstract transport class for TranscoderService."""
@@ -66,6 +65,7 @@ def __init__(
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
**kwargs,
) -> None:
"""Instantiate the transport.
@@ -89,6 +89,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
"""
# Save the hostname. Default to port 443 (HTTPS) if none is specified.
if ":" not in host:
@@ -117,13 +119,20 @@ def __init__(
**scopes_kwargs, quota_project_id=quota_project_id
)
+ # If the credentials is service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
# Save the credentials.
self._credentials = credentials
- # TODO(busunkim): These two class methods are in the base transport
+ # TODO(busunkim): This method is in the base transport
# to avoid duplicating code across the transport classes. These functions
- # should be deleted once the minimum required versions of google-api-core
- # and google-auth are increased.
+ # should be deleted once the minimum required versions of google-auth is increased.
# TODO: Remove this function once google-auth >= 1.25.0 is required
@classmethod
@@ -144,27 +153,6 @@ def _get_scopes_kwargs(
return scopes_kwargs
- # TODO: Remove this function once google-api-core >= 1.26.0 is required
- @classmethod
- def _get_self_signed_jwt_kwargs(
- cls, host: str, scopes: Optional[Sequence[str]]
- ) -> Dict[str, Union[Optional[Sequence[str]], str]]:
- """Returns kwargs to pass to grpc_helpers.create_channel depending on the google-api-core version"""
-
- self_signed_jwt_kwargs: Dict[str, Union[Optional[Sequence[str]], str]] = {}
-
- if _API_CORE_VERSION and (
- packaging.version.parse(_API_CORE_VERSION)
- >= packaging.version.parse("1.26.0")
- ):
- self_signed_jwt_kwargs["default_scopes"] = cls.AUTH_SCOPES
- self_signed_jwt_kwargs["scopes"] = scopes
- self_signed_jwt_kwargs["default_host"] = cls.DEFAULT_HOST
- else:
- self_signed_jwt_kwargs["scopes"] = scopes or cls.AUTH_SCOPES
-
- return self_signed_jwt_kwargs
-
def _prep_wrapped_messages(self, client_info):
# Precompute the wrapped methods.
self._wrapped_methods = {
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
index f422db0..d5cd44c 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
@@ -157,6 +157,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=True,
)
if not self._grpc_channel:
@@ -212,14 +213,14 @@ def create_channel(
and ``credentials_file`` are passed.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
index 15f78e3..03cb255 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
@@ -86,14 +86,14 @@ def create_channel(
aio.Channel: A gRPC AsyncIO channel object.
"""
- self_signed_jwt_kwargs = cls._get_self_signed_jwt_kwargs(host, scopes)
-
return grpc_helpers_async.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
- **self_signed_jwt_kwargs,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
**kwargs,
)
@@ -203,6 +203,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
+ always_use_jwt_access=True,
)
if not self._grpc_channel:
diff --git a/setup.py b/setup.py
index 0542889..83242bc 100644
--- a/setup.py
+++ b/setup.py
@@ -41,7 +41,7 @@
platforms="Posix; MacOS X; Windows",
include_package_data=True,
install_requires=(
- "google-api-core[grpc] >= 1.22.2, < 2.0.0dev",
+ "google-api-core[grpc] >= 1.26.0, <2.0.0dev",
"proto-plus >= 1.4.0",
"packaging >= 14.3",
),
diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt
index 4aef616..4fda429 100644
--- a/testing/constraints-3.6.txt
+++ b/testing/constraints-3.6.txt
@@ -5,7 +5,7 @@
#
# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev",
# Then this file should have foo==1.14.0
-google-api-core==1.22.2
+google-api-core==1.26.0
proto-plus==1.4.0
packaging==14.3
google-auth==1.24.0 # TODO: remove when google-auth>=1.25.0 si transitively required through google-api-core
diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
index 000f05c..b8187dc 100644
--- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
+++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
@@ -39,9 +39,6 @@
)
from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers
from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports
-from google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.base import (
- _API_CORE_VERSION,
-)
from google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.base import (
_GOOGLE_AUTH_VERSION,
)
@@ -53,8 +50,9 @@
import google.auth
-# TODO(busunkim): Once google-api-core >= 1.26.0 is required:
-# - Delete all the api-core and auth "less than" test cases
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
@@ -65,16 +63,6 @@
reason="This test requires google-auth >= 1.25.0",
)
-requires_api_core_lt_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) >= packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core < 1.26.0",
-)
-
-requires_api_core_gte_1_26_0 = pytest.mark.skipif(
- packaging.version.parse(_API_CORE_VERSION) < packaging.version.parse("1.26.0"),
- reason="This test requires google-api-core >= 1.26.0",
-)
-
def client_cert_source_callback():
return b"cert bytes", b"key bytes"
@@ -138,6 +126,18 @@ def test_transcoder_service_client_from_service_account_info(client_class):
assert client.transport._host == "transcoder.googleapis.com:443"
+@pytest.mark.parametrize(
+ "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,]
+)
+def test_transcoder_service_client_service_account_always_use_jwt(client_class):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ client = client_class(credentials=creds)
+ use_jwt.assert_called_with(True)
+
+
@pytest.mark.parametrize(
"client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,]
)
@@ -2693,7 +2693,6 @@ def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class):
(transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async),
],
)
-@requires_api_core_gte_1_26_0
def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers):
# If credentials and host are not provided, the transport class should use
# ADC credentials.
@@ -2722,79 +2721,6 @@ def test_transcoder_service_transport_create_channel(transport_class, grpc_helpe
)
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.TranscoderServiceGrpcTransport, grpc_helpers),
- (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_transcoder_service_transport_create_channel_old_api_core(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
- transport_class(quota_project_id="octopus")
-
- create_channel.assert_called_with(
- "transcoder.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
-@pytest.mark.parametrize(
- "transport_class,grpc_helpers",
- [
- (transports.TranscoderServiceGrpcTransport, grpc_helpers),
- (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async),
- ],
-)
-@requires_api_core_lt_1_26_0
-def test_transcoder_service_transport_create_channel_user_scopes(
- transport_class, grpc_helpers
-):
- # If credentials and host are not provided, the transport class should use
- # ADC credentials.
- with mock.patch.object(
- google.auth, "default", autospec=True
- ) as adc, mock.patch.object(
- grpc_helpers, "create_channel", autospec=True
- ) as create_channel:
- creds = ga_credentials.AnonymousCredentials()
- adc.return_value = (creds, None)
-
- transport_class(quota_project_id="octopus", scopes=["1", "2"])
-
- create_channel.assert_called_with(
- "transcoder.googleapis.com:443",
- credentials=creds,
- credentials_file=None,
- quota_project_id="octopus",
- scopes=["1", "2"],
- ssl_credentials=None,
- options=[
- ("grpc.max_send_message_length", -1),
- ("grpc.max_receive_message_length", -1),
- ],
- )
-
-
@pytest.mark.parametrize(
"transport_class",
[
From 0b497e430ef031cd83945440330acb2adc37713d Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 25 Jun 2021 20:46:25 +0000
Subject: [PATCH 6/9] chore(python): simplify nox steps in CONTRIBUTING.rst
(#64)
Source-Link: https://github.com/googleapis/synthtool/commit/26558bae8976a985d73c2d98c31d8612273f907d
Post-Processor: gcr.io/repo-automation-bots/owlbot-python:latest@sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719
---
.github/.OwlBot.lock.yaml | 2 +-
CONTRIBUTING.rst | 14 ++++++--------
2 files changed, 7 insertions(+), 9 deletions(-)
diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml
index 0954585..e2b39f9 100644
--- a/.github/.OwlBot.lock.yaml
+++ b/.github/.OwlBot.lock.yaml
@@ -1,3 +1,3 @@
docker:
image: gcr.io/repo-automation-bots/owlbot-python:latest
- digest: sha256:df50e8d462f86d6bcb42f27ecad55bb12c404f1c65de9c6fe4c4d25120080bd6
+ digest: sha256:99d90d097e4a4710cc8658ee0b5b963f4426d0e424819787c3ac1405c9a26719
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index 07970ca..33f0240 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -68,14 +68,12 @@ Using ``nox``
We use `nox `__ to instrument our tests.
- To test your changes, run unit tests with ``nox``::
+ $ nox -s unit
- $ nox -s unit-3.8
- $ ...
+- To run a single unit test::
-- Args to pytest can be passed through the nox command separated by a `--`. For
- example, to run a single test::
+ $ nox -s unit-3.9 -- -k
- $ nox -s unit-3.8 -- -k
.. note::
@@ -142,7 +140,7 @@ Running System Tests
- To run system tests, you can execute::
# Run all system tests
- $ nox -s system-3.8
+ $ nox -s system
# Run a single system test
$ nox -s system-3.8 -- -k
@@ -215,8 +213,8 @@ Supported versions can be found in our ``noxfile.py`` `config`_.
.. _config: https://github.com/googleapis/python-video-transcoder/blob/master/noxfile.py
-We also explicitly decided to support Python 3 beginning with version
-3.6. Reasons for this include:
+We also explicitly decided to support Python 3 beginning with version 3.6.
+Reasons for this include:
- Encouraging use of newest versions of Python 3
- Taking the lead of `prominent`_ open-source `projects`_
From 98d8b860227a9b9a8b4cecc851ec547d7789ac66 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Thu, 1 Jul 2021 02:26:09 +0000
Subject: [PATCH 7/9] fix: disable always_use_jwt_access (#66)
Committer: @busunkim96
PiperOrigin-RevId: 382142900
Source-Link: https://github.com/googleapis/googleapis/commit/513440fda515f3c799c22a30e3906dcda325004e
Source-Link: https://github.com/googleapis/googleapis-gen/commit/7b1e2c31233f79a704ec21ca410bf661d6bc68d0
---
.../transcoder_service/transports/base.py | 2 +-
.../transcoder_service/transports/grpc.py | 5 +++-
.../transports/grpc_asyncio.py | 5 +++-
.../test_transcoder_service.py | 26 ++++++++++++++++---
4 files changed, 31 insertions(+), 7 deletions(-)
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
index bfc94b5..101865e 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
@@ -100,7 +100,7 @@ def __init__(
scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
# Save the scopes.
- self._scopes = scopes or self.AUTH_SCOPES
+ self._scopes = scopes
# If no credentials are provided, then determine the appropriate
# defaults.
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
index d5cd44c..fe89feb 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
@@ -65,6 +65,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -105,6 +106,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
@@ -157,7 +160,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
- always_use_jwt_access=True,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
index 03cb255..06a48af 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
@@ -111,6 +111,7 @@ def __init__(
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
@@ -152,6 +153,8 @@ def __init__(
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
@@ -203,7 +206,7 @@ def __init__(
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
- always_use_jwt_access=True,
+ always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
index b8187dc..1df9e14 100644
--- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
+++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
@@ -135,7 +135,25 @@ def test_transcoder_service_client_service_account_always_use_jwt(client_class):
) as use_jwt:
creds = service_account.Credentials(None, None, None)
client = client_class(credentials=creds)
- use_jwt.assert_called_with(True)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.TranscoderServiceGrpcTransport, "grpc"),
+ (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+ ],
+)
+def test_transcoder_service_client_service_account_always_use_jwt_true(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
@pytest.mark.parametrize(
@@ -2743,7 +2761,7 @@ def test_transcoder_service_grpc_transport_client_cert_source_for_mtls(transport
"squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ scopes=None,
ssl_credentials=mock_ssl_channel_creds,
quota_project_id=None,
options=[
@@ -2852,7 +2870,7 @@ def test_transcoder_service_transport_channel_mtls_with_client_cert_source(
"mtls.squid.clam.whelk:443",
credentials=cred,
credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
@@ -2899,7 +2917,7 @@ def test_transcoder_service_transport_channel_mtls_with_adc(transport_class):
"mtls.squid.clam.whelk:443",
credentials=mock_cred,
credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ scopes=None,
ssl_credentials=mock_ssl_cred,
quota_project_id=None,
options=[
From 721d28ec565bfdb41a195167a989baf042ede228 Mon Sep 17 00:00:00 2001
From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com>
Date: Fri, 9 Jul 2021 21:14:24 +0000
Subject: [PATCH 8/9] feat: Add Transcoder V1 (#67)
PiperOrigin-RevId: 383681557
Source-Link: https://github.com/googleapis/googleapis/commit/509489a93f7fa360e313744142290c84b68bbdbd
Source-Link: https://github.com/googleapis/googleapis-gen/commit/9bbe472e2b79bc36df7e97a890eb3cd782b89859
---
docs/index.rst | 8 +
docs/transcoder_v1/services.rst | 6 +
docs/transcoder_v1/transcoder_service.rst | 10 +
docs/transcoder_v1/types.rst | 7 +
google/cloud/video/transcoder/__init__.py | 72 +-
google/cloud/video/transcoder_v1/__init__.py | 82 +
.../video/transcoder_v1/gapic_metadata.json | 103 +
google/cloud/video/transcoder_v1/py.typed | 2 +
.../video/transcoder_v1/services/__init__.py | 15 +
.../services/transcoder_service/__init__.py | 22 +
.../transcoder_service/async_client.py | 790 +++++
.../services/transcoder_service/client.py | 989 ++++++
.../services/transcoder_service/pagers.py | 284 ++
.../transcoder_service/transports/__init__.py | 33 +
.../transcoder_service/transports/base.py | 258 ++
.../transcoder_service/transports/grpc.py | 441 +++
.../transports/grpc_asyncio.py | 450 +++
.../video/transcoder_v1/types/__init__.py | 80 +
.../video/transcoder_v1/types/resources.py | 1409 ++++++++
.../video/transcoder_v1/types/services.py | 221 ++
owlbot.py | 2 +-
tests/unit/gapic/transcoder_v1/__init__.py | 15 +
.../transcoder_v1/test_transcoder_service.py | 3097 +++++++++++++++++
23 files changed, 8354 insertions(+), 42 deletions(-)
create mode 100644 docs/transcoder_v1/services.rst
create mode 100644 docs/transcoder_v1/transcoder_service.rst
create mode 100644 docs/transcoder_v1/types.rst
create mode 100644 google/cloud/video/transcoder_v1/__init__.py
create mode 100644 google/cloud/video/transcoder_v1/gapic_metadata.json
create mode 100644 google/cloud/video/transcoder_v1/py.typed
create mode 100644 google/cloud/video/transcoder_v1/services/__init__.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/client.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py
create mode 100644 google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py
create mode 100644 google/cloud/video/transcoder_v1/types/__init__.py
create mode 100644 google/cloud/video/transcoder_v1/types/resources.py
create mode 100644 google/cloud/video/transcoder_v1/types/services.py
create mode 100644 tests/unit/gapic/transcoder_v1/__init__.py
create mode 100644 tests/unit/gapic/transcoder_v1/test_transcoder_service.py
diff --git a/docs/index.rst b/docs/index.rst
index 5624ca4..4996c80 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -2,6 +2,14 @@
.. include:: multiprocessing.rst
+API Reference
+-------------
+.. toctree::
+ :maxdepth: 2
+
+ transcoder_v1/services
+ transcoder_v1/types
+
API Reference
-------------
.. toctree::
diff --git a/docs/transcoder_v1/services.rst b/docs/transcoder_v1/services.rst
new file mode 100644
index 0000000..1bd129e
--- /dev/null
+++ b/docs/transcoder_v1/services.rst
@@ -0,0 +1,6 @@
+Services for Google Cloud Video Transcoder v1 API
+=================================================
+.. toctree::
+ :maxdepth: 2
+
+ transcoder_service
diff --git a/docs/transcoder_v1/transcoder_service.rst b/docs/transcoder_v1/transcoder_service.rst
new file mode 100644
index 0000000..5bf6bd8
--- /dev/null
+++ b/docs/transcoder_v1/transcoder_service.rst
@@ -0,0 +1,10 @@
+TranscoderService
+-----------------------------------
+
+.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service
+ :members:
+ :inherited-members:
+
+.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service.pagers
+ :members:
+ :inherited-members:
diff --git a/docs/transcoder_v1/types.rst b/docs/transcoder_v1/types.rst
new file mode 100644
index 0000000..7dc3c71
--- /dev/null
+++ b/docs/transcoder_v1/types.rst
@@ -0,0 +1,7 @@
+Types for Google Cloud Video Transcoder v1 API
+==============================================
+
+.. automodule:: google.cloud.video.transcoder_v1.types
+ :members:
+ :undoc-members:
+ :show-inheritance:
diff --git a/google/cloud/video/transcoder/__init__.py b/google/cloud/video/transcoder/__init__.py
index 6e82ce4..a5fa944 100644
--- a/google/cloud/video/transcoder/__init__.py
+++ b/google/cloud/video/transcoder/__init__.py
@@ -14,50 +14,42 @@
# limitations under the License.
#
-from google.cloud.video.transcoder_v1beta1.services.transcoder_service.client import (
+from google.cloud.video.transcoder_v1.services.transcoder_service.client import (
TranscoderServiceClient,
)
-from google.cloud.video.transcoder_v1beta1.services.transcoder_service.async_client import (
+from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import (
TranscoderServiceAsyncClient,
)
-from google.cloud.video.transcoder_v1beta1.types.resources import AdBreak
-from google.cloud.video.transcoder_v1beta1.types.resources import AudioStream
-from google.cloud.video.transcoder_v1beta1.types.resources import EditAtom
-from google.cloud.video.transcoder_v1beta1.types.resources import ElementaryStream
-from google.cloud.video.transcoder_v1beta1.types.resources import Encryption
-from google.cloud.video.transcoder_v1beta1.types.resources import FailureDetail
-from google.cloud.video.transcoder_v1beta1.types.resources import Input
-from google.cloud.video.transcoder_v1beta1.types.resources import Job
-from google.cloud.video.transcoder_v1beta1.types.resources import JobConfig
-from google.cloud.video.transcoder_v1beta1.types.resources import JobTemplate
-from google.cloud.video.transcoder_v1beta1.types.resources import Manifest
-from google.cloud.video.transcoder_v1beta1.types.resources import MuxStream
-from google.cloud.video.transcoder_v1beta1.types.resources import Output
-from google.cloud.video.transcoder_v1beta1.types.resources import Overlay
-from google.cloud.video.transcoder_v1beta1.types.resources import PreprocessingConfig
-from google.cloud.video.transcoder_v1beta1.types.resources import Progress
-from google.cloud.video.transcoder_v1beta1.types.resources import PubsubDestination
-from google.cloud.video.transcoder_v1beta1.types.resources import SegmentSettings
-from google.cloud.video.transcoder_v1beta1.types.resources import SpriteSheet
-from google.cloud.video.transcoder_v1beta1.types.resources import TextStream
-from google.cloud.video.transcoder_v1beta1.types.resources import VideoStream
-from google.cloud.video.transcoder_v1beta1.types.services import CreateJobRequest
-from google.cloud.video.transcoder_v1beta1.types.services import (
- CreateJobTemplateRequest,
-)
-from google.cloud.video.transcoder_v1beta1.types.services import DeleteJobRequest
-from google.cloud.video.transcoder_v1beta1.types.services import (
- DeleteJobTemplateRequest,
-)
-from google.cloud.video.transcoder_v1beta1.types.services import GetJobRequest
-from google.cloud.video.transcoder_v1beta1.types.services import GetJobTemplateRequest
-from google.cloud.video.transcoder_v1beta1.types.services import ListJobsRequest
-from google.cloud.video.transcoder_v1beta1.types.services import ListJobsResponse
-from google.cloud.video.transcoder_v1beta1.types.services import ListJobTemplatesRequest
-from google.cloud.video.transcoder_v1beta1.types.services import (
- ListJobTemplatesResponse,
-)
+from google.cloud.video.transcoder_v1.types.resources import AdBreak
+from google.cloud.video.transcoder_v1.types.resources import AudioStream
+from google.cloud.video.transcoder_v1.types.resources import EditAtom
+from google.cloud.video.transcoder_v1.types.resources import ElementaryStream
+from google.cloud.video.transcoder_v1.types.resources import Encryption
+from google.cloud.video.transcoder_v1.types.resources import Input
+from google.cloud.video.transcoder_v1.types.resources import Job
+from google.cloud.video.transcoder_v1.types.resources import JobConfig
+from google.cloud.video.transcoder_v1.types.resources import JobTemplate
+from google.cloud.video.transcoder_v1.types.resources import Manifest
+from google.cloud.video.transcoder_v1.types.resources import MuxStream
+from google.cloud.video.transcoder_v1.types.resources import Output
+from google.cloud.video.transcoder_v1.types.resources import Overlay
+from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig
+from google.cloud.video.transcoder_v1.types.resources import PubsubDestination
+from google.cloud.video.transcoder_v1.types.resources import SegmentSettings
+from google.cloud.video.transcoder_v1.types.resources import SpriteSheet
+from google.cloud.video.transcoder_v1.types.resources import TextStream
+from google.cloud.video.transcoder_v1.types.resources import VideoStream
+from google.cloud.video.transcoder_v1.types.services import CreateJobRequest
+from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest
+from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest
+from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest
+from google.cloud.video.transcoder_v1.types.services import GetJobRequest
+from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest
+from google.cloud.video.transcoder_v1.types.services import ListJobsRequest
+from google.cloud.video.transcoder_v1.types.services import ListJobsResponse
+from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest
+from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse
__all__ = (
"TranscoderServiceClient",
@@ -67,7 +59,6 @@
"EditAtom",
"ElementaryStream",
"Encryption",
- "FailureDetail",
"Input",
"Job",
"JobConfig",
@@ -77,7 +68,6 @@
"Output",
"Overlay",
"PreprocessingConfig",
- "Progress",
"PubsubDestination",
"SegmentSettings",
"SpriteSheet",
diff --git a/google/cloud/video/transcoder_v1/__init__.py b/google/cloud/video/transcoder_v1/__init__.py
new file mode 100644
index 0000000..cd70bef
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/__init__.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+from .services.transcoder_service import TranscoderServiceClient
+from .services.transcoder_service import TranscoderServiceAsyncClient
+
+from .types.resources import AdBreak
+from .types.resources import AudioStream
+from .types.resources import EditAtom
+from .types.resources import ElementaryStream
+from .types.resources import Encryption
+from .types.resources import Input
+from .types.resources import Job
+from .types.resources import JobConfig
+from .types.resources import JobTemplate
+from .types.resources import Manifest
+from .types.resources import MuxStream
+from .types.resources import Output
+from .types.resources import Overlay
+from .types.resources import PreprocessingConfig
+from .types.resources import PubsubDestination
+from .types.resources import SegmentSettings
+from .types.resources import SpriteSheet
+from .types.resources import TextStream
+from .types.resources import VideoStream
+from .types.services import CreateJobRequest
+from .types.services import CreateJobTemplateRequest
+from .types.services import DeleteJobRequest
+from .types.services import DeleteJobTemplateRequest
+from .types.services import GetJobRequest
+from .types.services import GetJobTemplateRequest
+from .types.services import ListJobsRequest
+from .types.services import ListJobsResponse
+from .types.services import ListJobTemplatesRequest
+from .types.services import ListJobTemplatesResponse
+
+__all__ = (
+ "TranscoderServiceAsyncClient",
+ "AdBreak",
+ "AudioStream",
+ "CreateJobRequest",
+ "CreateJobTemplateRequest",
+ "DeleteJobRequest",
+ "DeleteJobTemplateRequest",
+ "EditAtom",
+ "ElementaryStream",
+ "Encryption",
+ "GetJobRequest",
+ "GetJobTemplateRequest",
+ "Input",
+ "Job",
+ "JobConfig",
+ "JobTemplate",
+ "ListJobTemplatesRequest",
+ "ListJobTemplatesResponse",
+ "ListJobsRequest",
+ "ListJobsResponse",
+ "Manifest",
+ "MuxStream",
+ "Output",
+ "Overlay",
+ "PreprocessingConfig",
+ "PubsubDestination",
+ "SegmentSettings",
+ "SpriteSheet",
+ "TextStream",
+ "TranscoderServiceClient",
+ "VideoStream",
+)
diff --git a/google/cloud/video/transcoder_v1/gapic_metadata.json b/google/cloud/video/transcoder_v1/gapic_metadata.json
new file mode 100644
index 0000000..6651379
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/gapic_metadata.json
@@ -0,0 +1,103 @@
+ {
+ "comment": "This file maps proto services/RPCs to the corresponding library clients/methods",
+ "language": "python",
+ "libraryPackage": "google.cloud.video.transcoder_v1",
+ "protoPackage": "google.cloud.video.transcoder.v1",
+ "schema": "1.0",
+ "services": {
+ "TranscoderService": {
+ "clients": {
+ "grpc": {
+ "libraryClient": "TranscoderServiceClient",
+ "rpcs": {
+ "CreateJob": {
+ "methods": [
+ "create_job"
+ ]
+ },
+ "CreateJobTemplate": {
+ "methods": [
+ "create_job_template"
+ ]
+ },
+ "DeleteJob": {
+ "methods": [
+ "delete_job"
+ ]
+ },
+ "DeleteJobTemplate": {
+ "methods": [
+ "delete_job_template"
+ ]
+ },
+ "GetJob": {
+ "methods": [
+ "get_job"
+ ]
+ },
+ "GetJobTemplate": {
+ "methods": [
+ "get_job_template"
+ ]
+ },
+ "ListJobTemplates": {
+ "methods": [
+ "list_job_templates"
+ ]
+ },
+ "ListJobs": {
+ "methods": [
+ "list_jobs"
+ ]
+ }
+ }
+ },
+ "grpc-async": {
+ "libraryClient": "TranscoderServiceAsyncClient",
+ "rpcs": {
+ "CreateJob": {
+ "methods": [
+ "create_job"
+ ]
+ },
+ "CreateJobTemplate": {
+ "methods": [
+ "create_job_template"
+ ]
+ },
+ "DeleteJob": {
+ "methods": [
+ "delete_job"
+ ]
+ },
+ "DeleteJobTemplate": {
+ "methods": [
+ "delete_job_template"
+ ]
+ },
+ "GetJob": {
+ "methods": [
+ "get_job"
+ ]
+ },
+ "GetJobTemplate": {
+ "methods": [
+ "get_job_template"
+ ]
+ },
+ "ListJobTemplates": {
+ "methods": [
+ "list_job_templates"
+ ]
+ },
+ "ListJobs": {
+ "methods": [
+ "list_jobs"
+ ]
+ }
+ }
+ }
+ }
+ }
+ }
+}
diff --git a/google/cloud/video/transcoder_v1/py.typed b/google/cloud/video/transcoder_v1/py.typed
new file mode 100644
index 0000000..a2716a6
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/py.typed
@@ -0,0 +1,2 @@
+# Marker file for PEP 561.
+# The google-cloud-video-transcoder package uses inline types.
diff --git a/google/cloud/video/transcoder_v1/services/__init__.py b/google/cloud/video/transcoder_v1/services/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py
new file mode 100644
index 0000000..d853c3a
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py
@@ -0,0 +1,22 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .client import TranscoderServiceClient
+from .async_client import TranscoderServiceAsyncClient
+
+__all__ = (
+ "TranscoderServiceClient",
+ "TranscoderServiceAsyncClient",
+)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py
new file mode 100644
index 0000000..5d129e7
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py
@@ -0,0 +1,790 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+import functools
+import re
+from typing import Dict, Sequence, Tuple, Type, Union
+import pkg_resources
+
+import google.api_core.client_options as ClientOptions # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.video.transcoder_v1.services.transcoder_service import pagers
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport
+from .client import TranscoderServiceClient
+
+
+class TranscoderServiceAsyncClient:
+ """Using the Transcoder API, you can queue asynchronous jobs for
+ transcoding media into various output formats. Output formats
+ may include different streaming standards such as HTTP Live
+ Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
+ You can also customize jobs using advanced features such as
+ Digital Rights Management (DRM), audio equalization, content
+ concatenation, and digital ad-stitch ready content generation.
+ """
+
+ _client: TranscoderServiceClient
+
+ DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT
+ DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT
+
+ job_path = staticmethod(TranscoderServiceClient.job_path)
+ parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path)
+ job_template_path = staticmethod(TranscoderServiceClient.job_template_path)
+ parse_job_template_path = staticmethod(
+ TranscoderServiceClient.parse_job_template_path
+ )
+ common_billing_account_path = staticmethod(
+ TranscoderServiceClient.common_billing_account_path
+ )
+ parse_common_billing_account_path = staticmethod(
+ TranscoderServiceClient.parse_common_billing_account_path
+ )
+ common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path)
+ parse_common_folder_path = staticmethod(
+ TranscoderServiceClient.parse_common_folder_path
+ )
+ common_organization_path = staticmethod(
+ TranscoderServiceClient.common_organization_path
+ )
+ parse_common_organization_path = staticmethod(
+ TranscoderServiceClient.parse_common_organization_path
+ )
+ common_project_path = staticmethod(TranscoderServiceClient.common_project_path)
+ parse_common_project_path = staticmethod(
+ TranscoderServiceClient.parse_common_project_path
+ )
+ common_location_path = staticmethod(TranscoderServiceClient.common_location_path)
+ parse_common_location_path = staticmethod(
+ TranscoderServiceClient.parse_common_location_path
+ )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranscoderServiceAsyncClient: The constructed client.
+ """
+ return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranscoderServiceAsyncClient: The constructed client.
+ """
+ return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore
+
+ from_service_account_json = from_service_account_file
+
+ @property
+ def transport(self) -> TranscoderServiceTransport:
+ """Returns the transport used by the client instance.
+
+ Returns:
+ TranscoderServiceTransport: The transport used by the client instance.
+ """
+ return self._client.transport
+
+ get_transport_class = functools.partial(
+ type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)
+ )
+
+ def __init__(
+ self,
+ *,
+ credentials: ga_credentials.Credentials = None,
+ transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio",
+ client_options: ClientOptions = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the transcoder service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, ~.TranscoderServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (ClientOptions): Custom options for the client. It
+ won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
+ environment variable can also be used to override the endpoint:
+ "always" (always use the default mTLS endpoint), "never" (always
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ """
+ self._client = TranscoderServiceClient(
+ credentials=credentials,
+ transport=transport,
+ client_options=client_options,
+ client_info=client_info,
+ )
+
+ async def create_job(
+ self,
+ request: services.CreateJobRequest = None,
+ *,
+ parent: str = None,
+ job: resources.Job = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Job:
+ r"""Creates a job in the specified region.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.CreateJobRequest`):
+ The request object. Request message for
+ `TranscoderService.CreateJob`.
+ parent (:class:`str`):
+ Required. The parent location to create and process this
+ job. Format: ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job (:class:`google.cloud.video.transcoder_v1.types.Job`):
+ Required. Parameters for creating
+ transcoding job.
+
+ This corresponds to the ``job`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.Job:
+ Transcoding job resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, job])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.CreateJobRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if job is not None:
+ request.job = job
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_job,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_jobs(
+ self,
+ request: services.ListJobsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListJobsAsyncPager:
+ r"""Lists jobs in the specified region.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.ListJobsRequest`):
+ The request object. Request message for
+ `TranscoderService.ListJobs`. The parent location from
+ which to retrieve the collection of jobs.
+ parent (:class:`str`):
+ Required. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager:
+ Response message for TranscoderService.ListJobs.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.ListJobsRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_jobs,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListJobsAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_job(
+ self,
+ request: services.GetJobRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Job:
+ r"""Returns the job data.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.GetJobRequest`):
+ The request object. Request message for
+ `TranscoderService.GetJob`.
+ name (:class:`str`):
+ Required. The name of the job to retrieve. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.Job:
+ Transcoding job resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.GetJobRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_job,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_job(
+ self,
+ request: services.DeleteJobRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a job.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobRequest`):
+ The request object. Request message for
+ `TranscoderService.DeleteJob`.
+ name (:class:`str`):
+ Required. The name of the job to delete. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.DeleteJobRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_job,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ async def create_job_template(
+ self,
+ request: services.CreateJobTemplateRequest = None,
+ *,
+ parent: str = None,
+ job_template: resources.JobTemplate = None,
+ job_template_id: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.JobTemplate:
+ r"""Creates a job template in the specified region.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest`):
+ The request object. Request message for
+ `TranscoderService.CreateJobTemplate`.
+ parent (:class:`str`):
+ Required. The parent location to create this job
+ template. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job_template (:class:`google.cloud.video.transcoder_v1.types.JobTemplate`):
+ Required. Parameters for creating job
+ template.
+
+ This corresponds to the ``job_template`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job_template_id (:class:`str`):
+ Required. The ID to use for the job template, which will
+ become the final component of the job template's
+ resource name.
+
+ This value should be 4-63 characters, and valid
+ characters must match the regular expression
+ ``[a-zA-Z][a-zA-Z0-9_-]*``.
+
+ This corresponds to the ``job_template_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.JobTemplate:
+ Transcoding job template resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, job_template, job_template_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.CreateJobTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if job_template is not None:
+ request.job_template = job_template
+ if job_template_id is not None:
+ request.job_template_id = job_template_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.create_job_template,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def list_job_templates(
+ self,
+ request: services.ListJobTemplatesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListJobTemplatesAsyncPager:
+ r"""Lists job templates in the specified region.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest`):
+ The request object. Request message for
+ `TranscoderService.ListJobTemplates`.
+ parent (:class:`str`):
+ Required. The parent location from which to retrieve the
+ collection of job templates. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager:
+ Response message for TranscoderService.ListJobTemplates.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.ListJobTemplatesRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.list_job_templates,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__aiter__` convenience method.
+ response = pagers.ListJobTemplatesAsyncPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ async def get_job_template(
+ self,
+ request: services.GetJobTemplateRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.JobTemplate:
+ r"""Returns the job template data.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.GetJobTemplateRequest`):
+ The request object. Request message for
+ `TranscoderService.GetJobTemplate`.
+ name (:class:`str`):
+ Required. The name of the job template to retrieve.
+ Format:
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.JobTemplate:
+ Transcoding job template resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.GetJobTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.get_job_template,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ async def delete_job_template(
+ self,
+ request: services.DeleteJobTemplateRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a job template.
+
+ Args:
+ request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest`):
+ The request object. Request message for
+ `TranscoderService.DeleteJobTemplate`.
+ name (:class:`str`):
+ Required. The name of the job template to delete.
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ request = services.DeleteJobTemplateRequest(request)
+
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = gapic_v1.method_async.wrap_method(
+ self._client._transport.delete_job_template,
+ default_timeout=60.0,
+ client_info=DEFAULT_CLIENT_INFO,
+ )
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ await rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+
try:
    # Report the installed package version in the user-agent sent with
    # every API request.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-video-transcoder",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    # Package metadata is unavailable (e.g. running from a source tree);
    # fall back to client info without a gapic_version.
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()


__all__ = ("TranscoderServiceAsyncClient",)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py
new file mode 100644
index 0000000..764897f
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py
@@ -0,0 +1,989 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from distutils import util
+import os
+import re
+from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union
+import pkg_resources
+
+from google.api_core import client_options as client_options_lib # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport import mtls # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+from google.auth.exceptions import MutualTLSChannelError # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.video.transcoder_v1.services.transcoder_service import pagers
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
+from .transports.grpc import TranscoderServiceGrpcTransport
+from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport
+
+
class TranscoderServiceClientMeta(type):
    """Metaclass for the TranscoderService client.

    This provides class-level methods for building and retrieving
    support objects (e.g. transport) without polluting the client instance
    objects.
    """

    # Registry mapping transport labels to transport classes. Insertion
    # order matters: the first entry doubles as the default transport.
    _transport_registry = (
        OrderedDict()
    )  # type: Dict[str, Type[TranscoderServiceTransport]]
    _transport_registry["grpc"] = TranscoderServiceGrpcTransport
    _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport

    def get_transport_class(
        cls, label: str = None,
    ) -> Type[TranscoderServiceTransport]:
        """Returns an appropriate transport class.

        Args:
            label: The name of the desired transport. If none is
                provided, then the first transport in the registry is used.

        Returns:
            The transport class to use.

        Raises:
            KeyError: If ``label`` is given but not present in the registry.
        """
        # If a specific transport is requested, return that one.
        if label:
            return cls._transport_registry[label]

        # No transport is requested; return the default (that is, the first one
        # in the dictionary).
        return next(iter(cls._transport_registry.values()))
+
+
+class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta):
+ """Using the Transcoder API, you can queue asynchronous jobs for
+ transcoding media into various output formats. Output formats
+ may include different streaming standards such as HTTP Live
+ Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
+ You can also customize jobs using advanced features such as
+ Digital Rights Management (DRM), audio equalization, content
+ concatenation, and digital ad-stitch ready content generation.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?"
+ )
+
+ m = mtls_endpoint_re.match(api_endpoint)
+ name, mtls, sandbox, googledomain = m.groups()
+ if mtls or not googledomain:
+ return api_endpoint
+
+ if sandbox:
+ return api_endpoint.replace(
+ "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
+ )
+
+ return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
+
    # Default (plain TLS) API endpoint for the Transcoder service.
    DEFAULT_ENDPOINT = "transcoder.googleapis.com"
    # mTLS variant, derived from DEFAULT_ENDPOINT at class-definition time;
    # ``__func__`` unwraps the staticmethod so it is callable here.
    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
        DEFAULT_ENDPOINT
    )
+
+ @classmethod
+ def from_service_account_info(cls, info: dict, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ info.
+
+ Args:
+ info (dict): The service account private key info.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranscoderServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_info(info)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ @classmethod
+ def from_service_account_file(cls, filename: str, *args, **kwargs):
+ """Creates an instance of this client using the provided credentials
+ file.
+
+ Args:
+ filename (str): The path to the service account private key json
+ file.
+ args: Additional arguments to pass to the constructor.
+ kwargs: Additional arguments to pass to the constructor.
+
+ Returns:
+ TranscoderServiceClient: The constructed client.
+ """
+ credentials = service_account.Credentials.from_service_account_file(filename)
+ kwargs["credentials"] = credentials
+ return cls(*args, **kwargs)
+
+ from_service_account_json = from_service_account_file
+
    @property
    def transport(self) -> TranscoderServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            TranscoderServiceTransport: The transport used by the client
                instance.
        """
        # ``_transport`` is assigned in ``__init__`` — either the instance
        # supplied by the caller or one built from client options.
        return self._transport
+
+ @staticmethod
+ def job_path(project: str, location: str, job: str,) -> str:
+ """Returns a fully-qualified job string."""
+ return "projects/{project}/locations/{location}/jobs/{job}".format(
+ project=project, location=location, job=job,
+ )
+
+ @staticmethod
+ def parse_job_path(path: str) -> Dict[str, str]:
+ """Parses a job path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def job_template_path(project: str, location: str, job_template: str,) -> str:
+ """Returns a fully-qualified job_template string."""
+ return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(
+ project=project, location=location, job_template=job_template,
+ )
+
+ @staticmethod
+ def parse_job_template_path(path: str) -> Dict[str, str]:
+ """Parses a job_template path into its component segments."""
+ m = re.match(
+ r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$",
+ path,
+ )
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str,) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str, str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str,) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder,)
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str, str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str,) -> str:
+ """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization,)
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str, str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str,) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project,)
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str, str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str,) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str, str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path)
+ return m.groupdict() if m else {}
+
    def __init__(
        self,
        *,
        credentials: Optional[ga_credentials.Credentials] = None,
        transport: Union[str, TranscoderServiceTransport, None] = None,
        client_options: Optional[client_options_lib.ClientOptions] = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the transcoder service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, TranscoderServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. It won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        # Normalize client_options: accept a plain dict or fall back to
        # an empty ClientOptions.
        if isinstance(client_options, dict):
            client_options = client_options_lib.from_dict(client_options)
        if client_options is None:
            client_options = client_options_lib.ClientOptions()

        # Create SSL credentials for mutual TLS if needed.
        # NOTE(review): ``distutils`` is deprecated (PEP 632); strtobool
        # raises ValueError on values other than the accepted true/false
        # strings.
        use_client_cert = bool(
            util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
        )

        client_cert_source_func = None
        is_mtls = False
        if use_client_cert:
            # An explicit cert source on client_options wins; otherwise fall
            # back to the environment's default client certificate, if any.
            if client_options.client_cert_source:
                is_mtls = True
                client_cert_source_func = client_options.client_cert_source
            else:
                is_mtls = mtls.has_default_client_cert_source()
                if is_mtls:
                    client_cert_source_func = mtls.default_client_cert_source()
                else:
                    client_cert_source_func = None

        # Figure out which api endpoint to use.
        # An explicit client_options.api_endpoint always takes precedence;
        # otherwise GOOGLE_API_USE_MTLS_ENDPOINT decides between the plain
        # and mTLS defaults.
        if client_options.api_endpoint is not None:
            api_endpoint = client_options.api_endpoint
        else:
            use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
            if use_mtls_env == "never":
                api_endpoint = self.DEFAULT_ENDPOINT
            elif use_mtls_env == "always":
                api_endpoint = self.DEFAULT_MTLS_ENDPOINT
            elif use_mtls_env == "auto":
                if is_mtls:
                    api_endpoint = self.DEFAULT_MTLS_ENDPOINT
                else:
                    api_endpoint = self.DEFAULT_ENDPOINT
            else:
                raise MutualTLSChannelError(
                    "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted "
                    "values: never, auto, always"
                )

        # Save or instantiate the transport.
        # Ordinarily, we provide the transport, but allowing a custom transport
        # instance provides an extensibility point for unusual situations.
        if isinstance(transport, TranscoderServiceTransport):
            # transport is a TranscoderServiceTransport instance.
            # A pre-built transport already carries its own credentials and
            # scopes, so supplying them here again is an error.
            if credentials or client_options.credentials_file:
                raise ValueError(
                    "When providing a transport instance, "
                    "provide its credentials directly."
                )
            if client_options.scopes:
                raise ValueError(
                    "When providing a transport instance, provide its scopes "
                    "directly."
                )
            self._transport = transport
        else:
            # ``transport`` is a label (or None); resolve it via the
            # metaclass registry and build the transport from the options
            # computed above.
            Transport = type(self).get_transport_class(transport)
            self._transport = Transport(
                credentials=credentials,
                credentials_file=client_options.credentials_file,
                host=api_endpoint,
                scopes=client_options.scopes,
                client_cert_source_for_mtls=client_cert_source_func,
                quota_project_id=client_options.quota_project_id,
                client_info=client_info,
            )
+
+ def create_job(
+ self,
+ request: services.CreateJobRequest = None,
+ *,
+ parent: str = None,
+ job: resources.Job = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Job:
+ r"""Creates a job in the specified region.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.CreateJobRequest):
+ The request object. Request message for
+ `TranscoderService.CreateJob`.
+ parent (str):
+ Required. The parent location to create and process this
+ job. Format: ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job (google.cloud.video.transcoder_v1.types.Job):
+ Required. Parameters for creating
+ transcoding job.
+
+ This corresponds to the ``job`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.Job:
+ Transcoding job resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, job])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.CreateJobRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.CreateJobRequest):
+ request = services.CreateJobRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if job is not None:
+ request.job = job
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_job]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def list_jobs(
+ self,
+ request: services.ListJobsRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListJobsPager:
+ r"""Lists jobs in the specified region.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.ListJobsRequest):
+ The request object. Request message for
+ `TranscoderService.ListJobs`. The parent location from
+ which to retrieve the collection of jobs.
+ parent (str):
+ Required. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager:
+ Response message for TranscoderService.ListJobs.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.ListJobsRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.ListJobsRequest):
+ request = services.ListJobsRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_jobs]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListJobsPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_job(
+ self,
+ request: services.GetJobRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.Job:
+ r"""Returns the job data.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.GetJobRequest):
+ The request object. Request message for
+ `TranscoderService.GetJob`.
+ name (str):
+ Required. The name of the job to retrieve. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.Job:
+ Transcoding job resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.GetJobRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.GetJobRequest):
+ request = services.GetJobRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_job]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def delete_job(
+ self,
+ request: services.DeleteJobRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a job.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.DeleteJobRequest):
+ The request object. Request message for
+ `TranscoderService.DeleteJob`.
+ name (str):
+ Required. The name of the job to delete. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.DeleteJobRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.DeleteJobRequest):
+ request = services.DeleteJobRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_job]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+ def create_job_template(
+ self,
+ request: services.CreateJobTemplateRequest = None,
+ *,
+ parent: str = None,
+ job_template: resources.JobTemplate = None,
+ job_template_id: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.JobTemplate:
+ r"""Creates a job template in the specified region.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest):
+ The request object. Request message for
+ `TranscoderService.CreateJobTemplate`.
+ parent (str):
+ Required. The parent location to create this job
+ template. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job_template (google.cloud.video.transcoder_v1.types.JobTemplate):
+ Required. Parameters for creating job
+ template.
+
+ This corresponds to the ``job_template`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ job_template_id (str):
+ Required. The ID to use for the job template, which will
+ become the final component of the job template's
+ resource name.
+
+ This value should be 4-63 characters, and valid
+ characters must match the regular expression
+ ``[a-zA-Z][a-zA-Z0-9_-]*``.
+
+ This corresponds to the ``job_template_id`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.JobTemplate:
+ Transcoding job template resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent, job_template, job_template_id])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.CreateJobTemplateRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.CreateJobTemplateRequest):
+ request = services.CreateJobTemplateRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+ if job_template is not None:
+ request.job_template = job_template
+ if job_template_id is not None:
+ request.job_template_id = job_template_id
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.create_job_template]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def list_job_templates(
+ self,
+ request: services.ListJobTemplatesRequest = None,
+ *,
+ parent: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> pagers.ListJobTemplatesPager:
+ r"""Lists job templates in the specified region.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest):
+ The request object. Request message for
+ `TranscoderService.ListJobTemplates`.
+ parent (str):
+ Required. The parent location from which to retrieve the
+ collection of job templates. Format:
+ ``projects/{project}/locations/{location}``
+
+ This corresponds to the ``parent`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager:
+ Response message for TranscoderService.ListJobTemplates.
+
+ Iterating over this object will yield results and
+ resolve additional pages automatically.
+
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([parent])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.ListJobTemplatesRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.ListJobTemplatesRequest):
+ request = services.ListJobTemplatesRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if parent is not None:
+ request.parent = parent
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.list_job_templates]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # This method is paged; wrap the response in a pager, which provides
+ # an `__iter__` convenience method.
+ response = pagers.ListJobTemplatesPager(
+ method=rpc, request=request, response=response, metadata=metadata,
+ )
+
+ # Done; return the response.
+ return response
+
+ def get_job_template(
+ self,
+ request: services.GetJobTemplateRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> resources.JobTemplate:
+ r"""Returns the job template data.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.GetJobTemplateRequest):
+ The request object. Request message for
+ `TranscoderService.GetJobTemplate`.
+ name (str):
+ Required. The name of the job template to retrieve.
+ Format:
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+
+ Returns:
+ google.cloud.video.transcoder_v1.types.JobTemplate:
+ Transcoding job template resource.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.GetJobTemplateRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.GetJobTemplateRequest):
+ request = services.GetJobTemplateRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.get_job_template]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
+
+ # Done; return the response.
+ return response
+
+ def delete_job_template(
+ self,
+ request: services.DeleteJobTemplateRequest = None,
+ *,
+ name: str = None,
+ retry: retries.Retry = gapic_v1.method.DEFAULT,
+ timeout: float = None,
+ metadata: Sequence[Tuple[str, str]] = (),
+ ) -> None:
+ r"""Deletes a job template.
+
+ Args:
+ request (google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest):
+ The request object. Request message for
+ `TranscoderService.DeleteJobTemplate`.
+ name (str):
+                Required. The name of the job template to delete. Format:
+                ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+
+ This corresponds to the ``name`` field
+ on the ``request`` instance; if ``request`` is provided, this
+ should not be set.
+ retry (google.api_core.retry.Retry): Designation of what errors, if any,
+ should be retried.
+ timeout (float): The timeout for this request.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ # Create or coerce a protobuf request object.
+ # Sanity check: If we got a request object, we should *not* have
+ # gotten any keyword arguments that map to the request.
+ has_flattened_params = any([name])
+ if request is not None and has_flattened_params:
+ raise ValueError(
+ "If the `request` argument is set, then none of "
+ "the individual field arguments should be set."
+ )
+
+ # Minor optimization to avoid making a copy if the user passes
+ # in a services.DeleteJobTemplateRequest.
+ # There's no risk of modifying the input as we've already verified
+ # there are no flattened fields.
+ if not isinstance(request, services.DeleteJobTemplateRequest):
+ request = services.DeleteJobTemplateRequest(request)
+ # If we have keyword arguments corresponding to fields on the
+ # request, apply these.
+ if name is not None:
+ request.name = name
+
+ # Wrap the RPC method; this adds retry and timeout information,
+ # and friendly error handling.
+ rpc = self._transport._wrapped_methods[self._transport.delete_job_template]
+
+ # Certain fields should be provided within the metadata header;
+ # add these here.
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)),
+ )
+
+ # Send the request.
+ rpc(
+ request, retry=retry, timeout=timeout, metadata=metadata,
+ )
+
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-video-transcoder",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+
+__all__ = ("TranscoderServiceClient",)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py
new file mode 100644
index 0000000..2df2cf4
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py
@@ -0,0 +1,284 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from typing import (
+ Any,
+ AsyncIterable,
+ Awaitable,
+ Callable,
+ Iterable,
+ Sequence,
+ Tuple,
+ Optional,
+)
+
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+
+
+class ListJobsPager:
+ """A pager for iterating through ``list_jobs`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``jobs`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListJobs`` requests and continue to iterate
+ through the ``jobs`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., services.ListJobsResponse],
+ request: services.ListJobsRequest,
+ response: services.ListJobsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.video.transcoder_v1.types.ListJobsRequest):
+ The initial request object.
+ response (google.cloud.video.transcoder_v1.types.ListJobsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = services.ListJobsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[services.ListJobsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[resources.Job]:
+ for page in self.pages:
+ yield from page.jobs
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListJobsAsyncPager:
+ """A pager for iterating through ``list_jobs`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``jobs`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListJobs`` requests and continue to iterate
+ through the ``jobs`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[services.ListJobsResponse]],
+ request: services.ListJobsRequest,
+ response: services.ListJobsResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+        """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.video.transcoder_v1.types.ListJobsRequest):
+ The initial request object.
+ response (google.cloud.video.transcoder_v1.types.ListJobsResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = services.ListJobsRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[services.ListJobsResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[resources.Job]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.jobs:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListJobTemplatesPager:
+ """A pager for iterating through ``list_job_templates`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and
+ provides an ``__iter__`` method to iterate through its
+ ``job_templates`` field.
+
+ If there are more pages, the ``__iter__`` method will make additional
+ ``ListJobTemplates`` requests and continue to iterate
+ through the ``job_templates`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., services.ListJobTemplatesResponse],
+ request: services.ListJobTemplatesRequest,
+ response: services.ListJobTemplatesResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+ """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest):
+ The initial request object.
+ response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = services.ListJobTemplatesRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ def pages(self) -> Iterable[services.ListJobTemplatesResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __iter__(self) -> Iterable[resources.JobTemplate]:
+ for page in self.pages:
+ yield from page.job_templates
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
+
+
+class ListJobTemplatesAsyncPager:
+ """A pager for iterating through ``list_job_templates`` requests.
+
+ This class thinly wraps an initial
+ :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and
+ provides an ``__aiter__`` method to iterate through its
+ ``job_templates`` field.
+
+ If there are more pages, the ``__aiter__`` method will make additional
+ ``ListJobTemplates`` requests and continue to iterate
+ through the ``job_templates`` field on the
+ corresponding responses.
+
+ All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse`
+ attributes are available on the pager. If multiple requests are made, only
+ the most recent response is retained, and thus used for attribute lookup.
+ """
+
+ def __init__(
+ self,
+ method: Callable[..., Awaitable[services.ListJobTemplatesResponse]],
+ request: services.ListJobTemplatesRequest,
+ response: services.ListJobTemplatesResponse,
+ *,
+ metadata: Sequence[Tuple[str, str]] = ()
+ ):
+        """Instantiate the pager.
+
+ Args:
+ method (Callable): The method that was originally called, and
+ which instantiated this pager.
+ request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest):
+ The initial request object.
+ response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse):
+ The initial response object.
+ metadata (Sequence[Tuple[str, str]]): Strings which should be
+ sent along with the request as metadata.
+ """
+ self._method = method
+ self._request = services.ListJobTemplatesRequest(request)
+ self._response = response
+ self._metadata = metadata
+
+ def __getattr__(self, name: str) -> Any:
+ return getattr(self._response, name)
+
+ @property
+ async def pages(self) -> AsyncIterable[services.ListJobTemplatesResponse]:
+ yield self._response
+ while self._response.next_page_token:
+ self._request.page_token = self._response.next_page_token
+ self._response = await self._method(self._request, metadata=self._metadata)
+ yield self._response
+
+ def __aiter__(self) -> AsyncIterable[resources.JobTemplate]:
+ async def async_generator():
+ async for page in self.pages:
+ for response in page.job_templates:
+ yield response
+
+ return async_generator()
+
+ def __repr__(self) -> str:
+ return "{0}<{1!r}>".format(self.__class__.__name__, self._response)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py
new file mode 100644
index 0000000..f7496c0
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py
@@ -0,0 +1,33 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from collections import OrderedDict
+from typing import Dict, Type
+
+from .base import TranscoderServiceTransport
+from .grpc import TranscoderServiceGrpcTransport
+from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport
+
+
+# Compile a registry of transports.
+_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]]
+_transport_registry["grpc"] = TranscoderServiceGrpcTransport
+_transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport
+
+__all__ = (
+ "TranscoderServiceTransport",
+ "TranscoderServiceGrpcTransport",
+ "TranscoderServiceGrpcAsyncIOTransport",
+)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py
new file mode 100644
index 0000000..4cc020c
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py
@@ -0,0 +1,258 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import abc
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Union
+import packaging.version
+import pkg_resources
+
+import google.auth # type: ignore
+import google.api_core # type: ignore
+from google.api_core import exceptions as core_exceptions # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import retry as retries # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.oauth2 import service_account # type: ignore
+
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.protobuf import empty_pb2 # type: ignore
+
+try:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
+ gapic_version=pkg_resources.get_distribution(
+ "google-cloud-video-transcoder",
+ ).version,
+ )
+except pkg_resources.DistributionNotFound:
+ DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
+
+try:
+ # google.auth.__version__ was added in 1.26.0
+ _GOOGLE_AUTH_VERSION = google.auth.__version__
+except AttributeError:
+ try: # try pkg_resources if it is available
+ _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version
+ except pkg_resources.DistributionNotFound: # pragma: NO COVER
+ _GOOGLE_AUTH_VERSION = None
+
+
+class TranscoderServiceTransport(abc.ABC):
+ """Abstract transport class for TranscoderService."""
+
+ AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",)
+
+ DEFAULT_HOST: str = "transcoder.googleapis.com"
+
+ def __init__(
+ self,
+ *,
+ host: str = DEFAULT_HOST,
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ **kwargs,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): A list of scopes.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+ """
+ # Save the hostname. Default to port 443 (HTTPS) if none is specified.
+ if ":" not in host:
+ host += ":443"
+ self._host = host
+
+ scopes_kwargs = self._get_scopes_kwargs(self._host, scopes)
+
+ # Save the scopes.
+ self._scopes = scopes
+
+ # If no credentials are provided, then determine the appropriate
+ # defaults.
+ if credentials and credentials_file:
+ raise core_exceptions.DuplicateCredentialArgs(
+ "'credentials_file' and 'credentials' are mutually exclusive"
+ )
+
+ if credentials_file is not None:
+ credentials, _ = google.auth.load_credentials_from_file(
+ credentials_file, **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+ elif credentials is None:
+ credentials, _ = google.auth.default(
+ **scopes_kwargs, quota_project_id=quota_project_id
+ )
+
+        # If the credentials are service account credentials, then always try to use self signed JWT.
+ if (
+ always_use_jwt_access
+ and isinstance(credentials, service_account.Credentials)
+ and hasattr(service_account.Credentials, "with_always_use_jwt_access")
+ ):
+ credentials = credentials.with_always_use_jwt_access(True)
+
+ # Save the credentials.
+ self._credentials = credentials
+
+ # TODO(busunkim): This method is in the base transport
+ # to avoid duplicating code across the transport classes. These functions
+ # should be deleted once the minimum required versions of google-auth is increased.
+
+ # TODO: Remove this function once google-auth >= 1.25.0 is required
+ @classmethod
+ def _get_scopes_kwargs(
+ cls, host: str, scopes: Optional[Sequence[str]]
+ ) -> Dict[str, Optional[Sequence[str]]]:
+ """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version"""
+
+ scopes_kwargs = {}
+
+ if _GOOGLE_AUTH_VERSION and (
+ packaging.version.parse(_GOOGLE_AUTH_VERSION)
+ >= packaging.version.parse("1.25.0")
+ ):
+ scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES}
+ else:
+ scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES}
+
+ return scopes_kwargs
+
+ def _prep_wrapped_messages(self, client_info):
+ # Precompute the wrapped methods.
+ self._wrapped_methods = {
+ self.create_job: gapic_v1.method.wrap_method(
+ self.create_job, default_timeout=60.0, client_info=client_info,
+ ),
+ self.list_jobs: gapic_v1.method.wrap_method(
+ self.list_jobs, default_timeout=60.0, client_info=client_info,
+ ),
+ self.get_job: gapic_v1.method.wrap_method(
+ self.get_job, default_timeout=60.0, client_info=client_info,
+ ),
+ self.delete_job: gapic_v1.method.wrap_method(
+ self.delete_job, default_timeout=60.0, client_info=client_info,
+ ),
+ self.create_job_template: gapic_v1.method.wrap_method(
+ self.create_job_template, default_timeout=60.0, client_info=client_info,
+ ),
+ self.list_job_templates: gapic_v1.method.wrap_method(
+ self.list_job_templates, default_timeout=60.0, client_info=client_info,
+ ),
+ self.get_job_template: gapic_v1.method.wrap_method(
+ self.get_job_template, default_timeout=60.0, client_info=client_info,
+ ),
+ self.delete_job_template: gapic_v1.method.wrap_method(
+ self.delete_job_template, default_timeout=60.0, client_info=client_info,
+ ),
+ }
+
+ @property
+ def create_job(
+ self,
+ ) -> Callable[
+ [services.CreateJobRequest], Union[resources.Job, Awaitable[resources.Job]]
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_jobs(
+ self,
+ ) -> Callable[
+ [services.ListJobsRequest],
+ Union[services.ListJobsResponse, Awaitable[services.ListJobsResponse]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_job(
+ self,
+ ) -> Callable[
+ [services.GetJobRequest], Union[resources.Job, Awaitable[resources.Job]]
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_job(
+ self,
+ ) -> Callable[
+ [services.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]]
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def create_job_template(
+ self,
+ ) -> Callable[
+ [services.CreateJobTemplateRequest],
+ Union[resources.JobTemplate, Awaitable[resources.JobTemplate]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def list_job_templates(
+ self,
+ ) -> Callable[
+ [services.ListJobTemplatesRequest],
+ Union[
+ services.ListJobTemplatesResponse,
+ Awaitable[services.ListJobTemplatesResponse],
+ ],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def get_job_template(
+ self,
+ ) -> Callable[
+ [services.GetJobTemplateRequest],
+ Union[resources.JobTemplate, Awaitable[resources.JobTemplate]],
+ ]:
+ raise NotImplementedError()
+
+ @property
+ def delete_job_template(
+ self,
+ ) -> Callable[
+ [services.DeleteJobTemplateRequest],
+ Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]],
+ ]:
+ raise NotImplementedError()
+
+
+__all__ = ("TranscoderServiceTransport",)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py
new file mode 100644
index 0000000..eab5ac2
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py
@@ -0,0 +1,441 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import grpc_helpers # type: ignore
+from google.api_core import gapic_v1 # type: ignore
+import google.auth # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+
+import grpc # type: ignore
+
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.protobuf import empty_pb2 # type: ignore
+from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
+
+
+class TranscoderServiceGrpcTransport(TranscoderServiceTransport):
+ """gRPC backend transport for TranscoderService.
+
+ Using the Transcoder API, you can queue asynchronous jobs for
+ transcoding media into various output formats. Output formats
+ may include different streaming standards such as HTTP Live
+ Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
+ You can also customize jobs using advanced features such as
+ Digital Rights Management (DRM), audio equalization, content
+ concatenation, and digital ad-stitch ready content generation.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _stubs: Dict[str, Callable]
+
+ def __init__(
+ self,
+ *,
+ host: str = "transcoder.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "transcoder.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> grpc.Channel:
+ """Create and return a gRPC channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def create_job(self) -> Callable[[services.CreateJobRequest], resources.Job]:
+ r"""Return a callable for the create job method over gRPC.
+
+ Creates a job in the specified region.
+
+ Returns:
+ Callable[[~.CreateJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_job" not in self._stubs:
+ self._stubs["create_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/CreateJob",
+ request_serializer=services.CreateJobRequest.serialize,
+ response_deserializer=resources.Job.deserialize,
+ )
+ return self._stubs["create_job"]
+
+ @property
+ def list_jobs(
+ self,
+ ) -> Callable[[services.ListJobsRequest], services.ListJobsResponse]:
+ r"""Return a callable for the list jobs method over gRPC.
+
+ Lists jobs in the specified region.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ ~.ListJobsResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_jobs" not in self._stubs:
+ self._stubs["list_jobs"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/ListJobs",
+ request_serializer=services.ListJobsRequest.serialize,
+ response_deserializer=services.ListJobsResponse.deserialize,
+ )
+ return self._stubs["list_jobs"]
+
+ @property
+ def get_job(self) -> Callable[[services.GetJobRequest], resources.Job]:
+ r"""Return a callable for the get job method over gRPC.
+
+ Returns the job data.
+
+ Returns:
+ Callable[[~.GetJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job" not in self._stubs:
+ self._stubs["get_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/GetJob",
+ request_serializer=services.GetJobRequest.serialize,
+ response_deserializer=resources.Job.deserialize,
+ )
+ return self._stubs["get_job"]
+
+ @property
+ def delete_job(self) -> Callable[[services.DeleteJobRequest], empty_pb2.Empty]:
+ r"""Return a callable for the delete job method over gRPC.
+
+ Deletes a job.
+
+ Returns:
+ Callable[[~.DeleteJobRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_job" not in self._stubs:
+ self._stubs["delete_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob",
+ request_serializer=services.DeleteJobRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs["delete_job"]
+
+ @property
+ def create_job_template(
+ self,
+ ) -> Callable[[services.CreateJobTemplateRequest], resources.JobTemplate]:
+ r"""Return a callable for the create job template method over gRPC.
+
+ Creates a job template in the specified region.
+
+ Returns:
+ Callable[[~.CreateJobTemplateRequest],
+ ~.JobTemplate]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_job_template" not in self._stubs:
+ self._stubs["create_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate",
+ request_serializer=services.CreateJobTemplateRequest.serialize,
+ response_deserializer=resources.JobTemplate.deserialize,
+ )
+ return self._stubs["create_job_template"]
+
+ @property
+ def list_job_templates(
+ self,
+ ) -> Callable[
+ [services.ListJobTemplatesRequest], services.ListJobTemplatesResponse
+ ]:
+ r"""Return a callable for the list job templates method over gRPC.
+
+ Lists job templates in the specified region.
+
+ Returns:
+ Callable[[~.ListJobTemplatesRequest],
+ ~.ListJobTemplatesResponse]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_job_templates" not in self._stubs:
+ self._stubs["list_job_templates"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates",
+ request_serializer=services.ListJobTemplatesRequest.serialize,
+ response_deserializer=services.ListJobTemplatesResponse.deserialize,
+ )
+ return self._stubs["list_job_templates"]
+
+ @property
+ def get_job_template(
+ self,
+ ) -> Callable[[services.GetJobTemplateRequest], resources.JobTemplate]:
+ r"""Return a callable for the get job template method over gRPC.
+
+ Returns the job template data.
+
+ Returns:
+ Callable[[~.GetJobTemplateRequest],
+ ~.JobTemplate]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job_template" not in self._stubs:
+ self._stubs["get_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate",
+ request_serializer=services.GetJobTemplateRequest.serialize,
+ response_deserializer=resources.JobTemplate.deserialize,
+ )
+ return self._stubs["get_job_template"]
+
+ @property
+ def delete_job_template(
+ self,
+ ) -> Callable[[services.DeleteJobTemplateRequest], empty_pb2.Empty]:
+ r"""Return a callable for the delete job template method over gRPC.
+
+ Deletes a job template.
+
+ Returns:
+ Callable[[~.DeleteJobTemplateRequest],
+ ~.Empty]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_job_template" not in self._stubs:
+ self._stubs["delete_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate",
+ request_serializer=services.DeleteJobTemplateRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs["delete_job_template"]
+
+
+__all__ = ("TranscoderServiceGrpcTransport",)
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py
new file mode 100644
index 0000000..972e4e4
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py
@@ -0,0 +1,450 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import warnings
+from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union
+
+from google.api_core import gapic_v1 # type: ignore
+from google.api_core import grpc_helpers_async # type: ignore
+from google.auth import credentials as ga_credentials # type: ignore
+from google.auth.transport.grpc import SslCredentials # type: ignore
+import packaging.version
+
+import grpc # type: ignore
+from grpc.experimental import aio # type: ignore
+
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.protobuf import empty_pb2 # type: ignore
+from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO
+from .grpc import TranscoderServiceGrpcTransport
+
+
+class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport):
+ """gRPC AsyncIO backend transport for TranscoderService.
+
+ Using the Transcoder API, you can queue asynchronous jobs for
+ transcoding media into various output formats. Output formats
+ may include different streaming standards such as HTTP Live
+ Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
+ You can also customize jobs using advanced features such as
+ Digital Rights Management (DRM), audio equalization, content
+ concatenation, and digital ad-stitch ready content generation.
+
+ This class defines the same methods as the primary client, so the
+ primary client can load the underlying transport implementation
+ and call it.
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(
+ cls,
+ host: str = "transcoder.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs,
+ ) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """
+
+ return grpc_helpers_async.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs,
+ )
+
+ def __init__(
+ self,
+ *,
+ host: str = "transcoder.googleapis.com",
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ channel: aio.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id=None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ channel (Optional[aio.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+                ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False
+ # If a channel was explicitly provided, set it.
+ self._grpc_channel = channel
+ self._ssl_channel_credentials = None
+ else:
+ if api_mtls_endpoint:
+ host = api_mtls_endpoint
+
+ # Create SSL credentials with client_cert_source or application
+ # default SSL credentials.
+ if client_cert_source:
+ cert, key = client_cert_source()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ else:
+ self._ssl_channel_credentials = SslCredentials().ssl_credentials
+
+ else:
+ if client_cert_source_for_mtls and not ssl_channel_credentials:
+ cert, key = client_cert_source_for_mtls()
+ self._ssl_channel_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+
+ # The base transport sets the host, credentials and scopes
+ super().__init__(
+ host=host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ scopes=scopes,
+ quota_project_id=quota_project_id,
+ client_info=client_info,
+ always_use_jwt_access=always_use_jwt_access,
+ )
+
+ if not self._grpc_channel:
+ self._grpc_channel = type(self).create_channel(
+ self._host,
+ credentials=self._credentials,
+ credentials_file=credentials_file,
+ scopes=self._scopes,
+ ssl_credentials=self._ssl_channel_credentials,
+ quota_project_id=quota_project_id,
+ options=[
+ ("grpc.max_send_message_length", -1),
+ ("grpc.max_receive_message_length", -1),
+ ],
+ )
+
+ # Wrap messages. This must be done after self._grpc_channel exists
+ self._prep_wrapped_messages(client_info)
+
+ @property
+ def grpc_channel(self) -> aio.Channel:
+ """Create the channel designed to connect to this service.
+
+ This property caches on the instance; repeated calls return
+ the same channel.
+ """
+ # Return the channel from cache.
+ return self._grpc_channel
+
+ @property
+ def create_job(
+ self,
+ ) -> Callable[[services.CreateJobRequest], Awaitable[resources.Job]]:
+ r"""Return a callable for the create job method over gRPC.
+
+ Creates a job in the specified region.
+
+ Returns:
+ Callable[[~.CreateJobRequest],
+ Awaitable[~.Job]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_job" not in self._stubs:
+ self._stubs["create_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/CreateJob",
+ request_serializer=services.CreateJobRequest.serialize,
+ response_deserializer=resources.Job.deserialize,
+ )
+ return self._stubs["create_job"]
+
+ @property
+ def list_jobs(
+ self,
+ ) -> Callable[[services.ListJobsRequest], Awaitable[services.ListJobsResponse]]:
+ r"""Return a callable for the list jobs method over gRPC.
+
+ Lists jobs in the specified region.
+
+ Returns:
+ Callable[[~.ListJobsRequest],
+ Awaitable[~.ListJobsResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_jobs" not in self._stubs:
+ self._stubs["list_jobs"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/ListJobs",
+ request_serializer=services.ListJobsRequest.serialize,
+ response_deserializer=services.ListJobsResponse.deserialize,
+ )
+ return self._stubs["list_jobs"]
+
+ @property
+ def get_job(self) -> Callable[[services.GetJobRequest], Awaitable[resources.Job]]:
+ r"""Return a callable for the get job method over gRPC.
+
+ Returns the job data.
+
+ Returns:
+ Callable[[~.GetJobRequest],
+ Awaitable[~.Job]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job" not in self._stubs:
+ self._stubs["get_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/GetJob",
+ request_serializer=services.GetJobRequest.serialize,
+ response_deserializer=resources.Job.deserialize,
+ )
+ return self._stubs["get_job"]
+
+ @property
+ def delete_job(
+ self,
+ ) -> Callable[[services.DeleteJobRequest], Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the delete job method over gRPC.
+
+ Deletes a job.
+
+ Returns:
+ Callable[[~.DeleteJobRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_job" not in self._stubs:
+ self._stubs["delete_job"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob",
+ request_serializer=services.DeleteJobRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs["delete_job"]
+
+ @property
+ def create_job_template(
+ self,
+ ) -> Callable[
+ [services.CreateJobTemplateRequest], Awaitable[resources.JobTemplate]
+ ]:
+ r"""Return a callable for the create job template method over gRPC.
+
+ Creates a job template in the specified region.
+
+ Returns:
+ Callable[[~.CreateJobTemplateRequest],
+ Awaitable[~.JobTemplate]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "create_job_template" not in self._stubs:
+ self._stubs["create_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate",
+ request_serializer=services.CreateJobTemplateRequest.serialize,
+ response_deserializer=resources.JobTemplate.deserialize,
+ )
+ return self._stubs["create_job_template"]
+
+ @property
+ def list_job_templates(
+ self,
+ ) -> Callable[
+ [services.ListJobTemplatesRequest], Awaitable[services.ListJobTemplatesResponse]
+ ]:
+ r"""Return a callable for the list job templates method over gRPC.
+
+ Lists job templates in the specified region.
+
+ Returns:
+ Callable[[~.ListJobTemplatesRequest],
+ Awaitable[~.ListJobTemplatesResponse]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "list_job_templates" not in self._stubs:
+ self._stubs["list_job_templates"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates",
+ request_serializer=services.ListJobTemplatesRequest.serialize,
+ response_deserializer=services.ListJobTemplatesResponse.deserialize,
+ )
+ return self._stubs["list_job_templates"]
+
+ @property
+ def get_job_template(
+ self,
+ ) -> Callable[[services.GetJobTemplateRequest], Awaitable[resources.JobTemplate]]:
+ r"""Return a callable for the get job template method over gRPC.
+
+ Returns the job template data.
+
+ Returns:
+ Callable[[~.GetJobTemplateRequest],
+ Awaitable[~.JobTemplate]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "get_job_template" not in self._stubs:
+ self._stubs["get_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate",
+ request_serializer=services.GetJobTemplateRequest.serialize,
+ response_deserializer=resources.JobTemplate.deserialize,
+ )
+ return self._stubs["get_job_template"]
+
+ @property
+ def delete_job_template(
+ self,
+ ) -> Callable[[services.DeleteJobTemplateRequest], Awaitable[empty_pb2.Empty]]:
+ r"""Return a callable for the delete job template method over gRPC.
+
+ Deletes a job template.
+
+ Returns:
+ Callable[[~.DeleteJobTemplateRequest],
+ Awaitable[~.Empty]]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if "delete_job_template" not in self._stubs:
+ self._stubs["delete_job_template"] = self.grpc_channel.unary_unary(
+ "/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate",
+ request_serializer=services.DeleteJobTemplateRequest.serialize,
+ response_deserializer=empty_pb2.Empty.FromString,
+ )
+ return self._stubs["delete_job_template"]
+
+
+__all__ = ("TranscoderServiceGrpcAsyncIOTransport",)
diff --git a/google/cloud/video/transcoder_v1/types/__init__.py b/google/cloud/video/transcoder_v1/types/__init__.py
new file mode 100644
index 0000000..4aa88f8
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/types/__init__.py
@@ -0,0 +1,80 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from .resources import (
+ AdBreak,
+ AudioStream,
+ EditAtom,
+ ElementaryStream,
+ Encryption,
+ Input,
+ Job,
+ JobConfig,
+ JobTemplate,
+ Manifest,
+ MuxStream,
+ Output,
+ Overlay,
+ PreprocessingConfig,
+ PubsubDestination,
+ SegmentSettings,
+ SpriteSheet,
+ TextStream,
+ VideoStream,
+)
+from .services import (
+ CreateJobRequest,
+ CreateJobTemplateRequest,
+ DeleteJobRequest,
+ DeleteJobTemplateRequest,
+ GetJobRequest,
+ GetJobTemplateRequest,
+ ListJobsRequest,
+ ListJobsResponse,
+ ListJobTemplatesRequest,
+ ListJobTemplatesResponse,
+)
+
+__all__ = (
+ "AdBreak",
+ "AudioStream",
+ "EditAtom",
+ "ElementaryStream",
+ "Encryption",
+ "Input",
+ "Job",
+ "JobConfig",
+ "JobTemplate",
+ "Manifest",
+ "MuxStream",
+ "Output",
+ "Overlay",
+ "PreprocessingConfig",
+ "PubsubDestination",
+ "SegmentSettings",
+ "SpriteSheet",
+ "TextStream",
+ "VideoStream",
+ "CreateJobRequest",
+ "CreateJobTemplateRequest",
+ "DeleteJobRequest",
+ "DeleteJobTemplateRequest",
+ "GetJobRequest",
+ "GetJobTemplateRequest",
+ "ListJobsRequest",
+ "ListJobsResponse",
+ "ListJobTemplatesRequest",
+ "ListJobTemplatesResponse",
+)
diff --git a/google/cloud/video/transcoder_v1/types/resources.py b/google/cloud/video/transcoder_v1/types/resources.py
new file mode 100644
index 0000000..5b655c2
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/types/resources.py
@@ -0,0 +1,1409 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto # type: ignore
+
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+
+
+__protobuf__ = proto.module(
+ package="google.cloud.video.transcoder.v1",
+ manifest={
+ "Job",
+ "JobTemplate",
+ "JobConfig",
+ "Input",
+ "Output",
+ "EditAtom",
+ "AdBreak",
+ "ElementaryStream",
+ "MuxStream",
+ "Manifest",
+ "PubsubDestination",
+ "SpriteSheet",
+ "Overlay",
+ "PreprocessingConfig",
+ "VideoStream",
+ "AudioStream",
+ "TextStream",
+ "SegmentSettings",
+ "Encryption",
+ },
+)
+
+
+class Job(proto.Message):
+ r"""Transcoding job resource.
+ Attributes:
+ name (str):
+ The resource name of the job. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+ input_uri (str):
+ Input only. Specify the ``input_uri`` to populate empty
+ ``uri`` fields in each element of ``Job.config.inputs`` or
+ ``JobTemplate.config.inputs`` when using template. URI of
+ the media. Input files must be at least 5 seconds in
+ duration and stored in Cloud Storage (for example,
+ ``gs://bucket/inputs/file.mp4``).
+ output_uri (str):
+ Input only. Specify the ``output_uri`` to populate an empty
+ ``Job.config.output.uri`` or
+ ``JobTemplate.config.output.uri`` when using template. URI
+ for the output file(s). For example,
+ ``gs://my-bucket/outputs/``.
+ template_id (str):
+ Input only. Specify the ``template_id`` to use for
+ populating ``Job.config``. The default is ``preset/web-hd``.
+
+ Preset Transcoder templates:
+
+ - ``preset/{preset_id}``
+
+ - User defined JobTemplate: ``{job_template_id}``
+ config (google.cloud.video.transcoder_v1.types.JobConfig):
+ The configuration for this job.
+ state (google.cloud.video.transcoder_v1.types.Job.ProcessingState):
+ Output only. The current state of the job.
+ create_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time the job was created.
+ start_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time the transcoding
+ started.
+ end_time (google.protobuf.timestamp_pb2.Timestamp):
+ Output only. The time the transcoding
+ finished.
+ ttl_after_completion_days (int):
+ Job time to live value in days, which will be
+ effective after job completion. Job should be
+ deleted automatically after the given TTL. Enter
+ a value between 1 and 90. The default is 30.
+ error (google.rpc.status_pb2.Status):
+ Output only. An error object that describes the reason for
+ the failure. This property is always present when ``state``
+ is ``FAILED``.
+ """
+
+ class ProcessingState(proto.Enum):
+ r"""The current state of the job."""
+ PROCESSING_STATE_UNSPECIFIED = 0
+ PENDING = 1
+ RUNNING = 2
+ SUCCEEDED = 3
+ FAILED = 4
+
+ name = proto.Field(proto.STRING, number=1,)
+ input_uri = proto.Field(proto.STRING, number=2,)
+ output_uri = proto.Field(proto.STRING, number=3,)
+ template_id = proto.Field(proto.STRING, number=4, oneof="job_config",)
+ config = proto.Field(
+ proto.MESSAGE, number=5, oneof="job_config", message="JobConfig",
+ )
+ state = proto.Field(proto.ENUM, number=8, enum=ProcessingState,)
+ create_time = proto.Field(
+ proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp,
+ )
+ start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp,)
+ end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp,)
+ ttl_after_completion_days = proto.Field(proto.INT32, number=15,)
+ error = proto.Field(proto.MESSAGE, number=17, message=status_pb2.Status,)
+
+
+class JobTemplate(proto.Message):
+ r"""Transcoding job template resource.
+ Attributes:
+ name (str):
+ The resource name of the job template. Format:
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+ config (google.cloud.video.transcoder_v1.types.JobConfig):
+ The configuration for this template.
+ """
+
+ name = proto.Field(proto.STRING, number=1,)
+ config = proto.Field(proto.MESSAGE, number=2, message="JobConfig",)
+
+
+class JobConfig(proto.Message):
+ r"""Job configuration
+ Attributes:
+ inputs (Sequence[google.cloud.video.transcoder_v1.types.Input]):
+ List of input assets stored in Cloud Storage.
+ edit_list (Sequence[google.cloud.video.transcoder_v1.types.EditAtom]):
+ List of ``Edit atom``\ s. Defines the ultimate timeline of
+ the resulting file or manifest.
+ elementary_streams (Sequence[google.cloud.video.transcoder_v1.types.ElementaryStream]):
+ List of elementary streams.
+ mux_streams (Sequence[google.cloud.video.transcoder_v1.types.MuxStream]):
+ List of multiplexing settings for output
+ streams.
+ manifests (Sequence[google.cloud.video.transcoder_v1.types.Manifest]):
+ List of output manifests.
+ output (google.cloud.video.transcoder_v1.types.Output):
+ Output configuration.
+ ad_breaks (Sequence[google.cloud.video.transcoder_v1.types.AdBreak]):
+ List of ad breaks. Specifies where to insert
+ ad break tags in the output manifests.
+ pubsub_destination (google.cloud.video.transcoder_v1.types.PubsubDestination):
+ Destination on Pub/Sub.
+ sprite_sheets (Sequence[google.cloud.video.transcoder_v1.types.SpriteSheet]):
+ List of output sprite sheets.
+ overlays (Sequence[google.cloud.video.transcoder_v1.types.Overlay]):
+ List of overlays on the output video, in
+ descending Z-order.
+ """
+
+ inputs = proto.RepeatedField(proto.MESSAGE, number=1, message="Input",)
+ edit_list = proto.RepeatedField(proto.MESSAGE, number=2, message="EditAtom",)
+ elementary_streams = proto.RepeatedField(
+ proto.MESSAGE, number=3, message="ElementaryStream",
+ )
+ mux_streams = proto.RepeatedField(proto.MESSAGE, number=4, message="MuxStream",)
+ manifests = proto.RepeatedField(proto.MESSAGE, number=5, message="Manifest",)
+ output = proto.Field(proto.MESSAGE, number=6, message="Output",)
+ ad_breaks = proto.RepeatedField(proto.MESSAGE, number=7, message="AdBreak",)
+ pubsub_destination = proto.Field(
+ proto.MESSAGE, number=8, message="PubsubDestination",
+ )
+ sprite_sheets = proto.RepeatedField(proto.MESSAGE, number=9, message="SpriteSheet",)
+ overlays = proto.RepeatedField(proto.MESSAGE, number=10, message="Overlay",)
+
+
+class Input(proto.Message):
+ r"""Input asset.
+ Attributes:
+ key (str):
+ A unique key for this input. Must be
+ specified when using advanced mapping and edit
+ lists.
+ uri (str):
+ URI of the media. Input files must be at least 5 seconds in
+ duration and stored in Cloud Storage (for example,
+ ``gs://bucket/inputs/file.mp4``). If empty, the value will
+ be populated from ``Job.input_uri``.
+ preprocessing_config (google.cloud.video.transcoder_v1.types.PreprocessingConfig):
+ Preprocessing configurations.
+ """
+
+ key = proto.Field(proto.STRING, number=1,)
+ uri = proto.Field(proto.STRING, number=2,)
+ preprocessing_config = proto.Field(
+ proto.MESSAGE, number=3, message="PreprocessingConfig",
+ )
+
+
+class Output(proto.Message):
+ r"""Location of output file(s) in a Cloud Storage bucket.
+ Attributes:
+ uri (str):
+ URI for the output file(s). For example,
+            ``gs://my-bucket/outputs/``. If empty, the value is
+            populated from ``Job.output_uri``.
+ """
+
+ uri = proto.Field(proto.STRING, number=1,)
+
+
+class EditAtom(proto.Message):
+ r"""Edit atom.
+ Attributes:
+ key (str):
+ A unique key for this atom. Must be specified
+ when using advanced mapping.
+ inputs (Sequence[str]):
+ List of ``Input.key``\ s identifying files that should be
+ used in this atom. The listed ``inputs`` must have the same
+ timeline.
+ end_time_offset (google.protobuf.duration_pb2.Duration):
+ End time in seconds for the atom, relative to the input file
+ timeline. When ``end_time_offset`` is not specified, the
+ ``inputs`` are used until the end of the atom.
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ Start time in seconds for the atom, relative to the input
+ file timeline. The default is ``0s``.
+ """
+
+ key = proto.Field(proto.STRING, number=1,)
+ inputs = proto.RepeatedField(proto.STRING, number=2,)
+ end_time_offset = proto.Field(
+ proto.MESSAGE, number=3, message=duration_pb2.Duration,
+ )
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=4, message=duration_pb2.Duration,
+ )
+
+
+class AdBreak(proto.Message):
+ r"""Ad break.
+ Attributes:
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ Start time in seconds for the ad break, relative to the
+ output file timeline. The default is ``0s``.
+ """
+
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=1, message=duration_pb2.Duration,
+ )
+
+
+class ElementaryStream(proto.Message):
+ r"""Encoding of an input file such as an audio, video, or text
+ track. Elementary streams must be packaged before
+ mapping and sharing between different output formats.
+
+ Attributes:
+ key (str):
+ A unique key for this elementary stream.
+ video_stream (google.cloud.video.transcoder_v1.types.VideoStream):
+ Encoding of a video stream.
+ audio_stream (google.cloud.video.transcoder_v1.types.AudioStream):
+ Encoding of an audio stream.
+ text_stream (google.cloud.video.transcoder_v1.types.TextStream):
+ Encoding of a text stream. For example,
+ closed captions or subtitles.
+ """
+
+ key = proto.Field(proto.STRING, number=4,)
+ video_stream = proto.Field(
+ proto.MESSAGE, number=1, oneof="elementary_stream", message="VideoStream",
+ )
+ audio_stream = proto.Field(
+ proto.MESSAGE, number=2, oneof="elementary_stream", message="AudioStream",
+ )
+ text_stream = proto.Field(
+ proto.MESSAGE, number=3, oneof="elementary_stream", message="TextStream",
+ )
+
+
+class MuxStream(proto.Message):
+ r"""Multiplexing settings for output stream.
+ Attributes:
+ key (str):
+ A unique key for this multiplexed stream. HLS media
+ manifests will be named ``MuxStream.key`` with the
+ ``".m3u8"`` extension suffix.
+ file_name (str):
+ The name of the generated file. The default is
+ ``MuxStream.key`` with the extension suffix corresponding to
+ the ``MuxStream.container``.
+
+ Individual segments also have an incremental 10-digit
+ zero-padded suffix starting from 0 before the extension,
+ such as ``"mux_stream0000000123.ts"``.
+ container (str):
+            The container format. The default is ``"mp4"``.
+
+ Supported container formats:
+
+ - 'ts'
+ - 'fmp4'- the corresponding file extension is ``".m4s"``
+ - 'mp4'
+ - 'vtt'
+ elementary_streams (Sequence[str]):
+ List of ``ElementaryStream.key``\ s multiplexed in this
+ stream.
+ segment_settings (google.cloud.video.transcoder_v1.types.SegmentSettings):
+ Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``.
+ encryption (google.cloud.video.transcoder_v1.types.Encryption):
+ Encryption settings.
+ """
+
+ key = proto.Field(proto.STRING, number=1,)
+ file_name = proto.Field(proto.STRING, number=2,)
+ container = proto.Field(proto.STRING, number=3,)
+ elementary_streams = proto.RepeatedField(proto.STRING, number=4,)
+ segment_settings = proto.Field(proto.MESSAGE, number=5, message="SegmentSettings",)
+ encryption = proto.Field(proto.MESSAGE, number=6, message="Encryption",)
+
+
+class Manifest(proto.Message):
+ r"""Manifest configuration.
+ Attributes:
+ file_name (str):
+ The name of the generated file. The default is
+ ``"manifest"`` with the extension suffix corresponding to
+ the ``Manifest.type``.
+ type_ (google.cloud.video.transcoder_v1.types.Manifest.ManifestType):
+ Required. Type of the manifest, can be "HLS"
+ or "DASH".
+ mux_streams (Sequence[str]):
+ Required. List of user given ``MuxStream.key``\ s that
+ should appear in this manifest.
+
+ When ``Manifest.type`` is ``HLS``, a media manifest with
+ name ``MuxStream.key`` and ``.m3u8`` extension is generated
+ for each element of the ``Manifest.mux_streams``.
+ """
+
+ class ManifestType(proto.Enum):
+ r"""The manifest type can be either ``"HLS"`` or ``"DASH"``."""
+ MANIFEST_TYPE_UNSPECIFIED = 0
+ HLS = 1
+ DASH = 2
+
+ file_name = proto.Field(proto.STRING, number=1,)
+ type_ = proto.Field(proto.ENUM, number=2, enum=ManifestType,)
+ mux_streams = proto.RepeatedField(proto.STRING, number=3,)
+
+
+class PubsubDestination(proto.Message):
+ r"""A Pub/Sub destination.
+ Attributes:
+ topic (str):
+ The name of the Pub/Sub topic to publish job completion
+ notification to. For example:
+ ``projects/{project}/topics/{topic}``.
+ """
+
+ topic = proto.Field(proto.STRING, number=1,)
+
+
+class SpriteSheet(proto.Message):
+ r"""Sprite sheet configuration.
+
+ Attributes:
+ format_ (str):
+ Format type. The default is ``"jpeg"``.
+
+ Supported formats:
+
+ - 'jpeg'
+ file_prefix (str):
+ Required. File name prefix for the generated sprite sheets.
+
+ Each sprite sheet has an incremental 10-digit zero-padded
+ suffix starting from 0 before the extension, such as
+ ``"sprite_sheet0000000123.jpeg"``.
+ sprite_width_pixels (int):
+ Required. The width of sprite in pixels. Must be an even
+ integer. To preserve the source aspect ratio, set the
+ [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
+ field or the
+ [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
+ field, but not both (the API will automatically calculate
+ the missing field).
+ sprite_height_pixels (int):
+ Required. The height of sprite in pixels. Must be an even
+ integer. To preserve the source aspect ratio, set the
+ [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
+ field or the
+ [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
+ field, but not both (the API will automatically calculate
+ the missing field).
+ column_count (int):
+ The maximum number of sprites per row in a
+ sprite sheet. The default is 0, which indicates
+ no maximum limit.
+ row_count (int):
+ The maximum number of rows per sprite sheet.
+ When the sprite sheet is full, a new sprite
+ sheet is created. The default is 0, which
+ indicates no maximum limit.
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ Start time in seconds, relative to the output file timeline.
+ Determines the first sprite to pick. The default is ``0s``.
+ end_time_offset (google.protobuf.duration_pb2.Duration):
+ End time in seconds, relative to the output file timeline.
+ When ``end_time_offset`` is not specified, the sprites are
+ generated until the end of the output file.
+ total_count (int):
+ Total number of sprites. Create the specified
+ number of sprites distributed evenly across the
+ timeline of the output media. The default is
+ 100.
+ interval (google.protobuf.duration_pb2.Duration):
+ Starting from ``0s``, create sprites at regular intervals.
+ Specify the interval value in seconds.
+ quality (int):
+ The quality of the generated sprite sheet.
+ Enter a value between 1 and 100, where 1 is the
+ lowest quality and 100 is the highest quality.
+ The default is 100. A high quality value
+ corresponds to a low image data compression
+ ratio.
+ """
+
+ format_ = proto.Field(proto.STRING, number=1,)
+ file_prefix = proto.Field(proto.STRING, number=2,)
+ sprite_width_pixels = proto.Field(proto.INT32, number=3,)
+ sprite_height_pixels = proto.Field(proto.INT32, number=4,)
+ column_count = proto.Field(proto.INT32, number=5,)
+ row_count = proto.Field(proto.INT32, number=6,)
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=7, message=duration_pb2.Duration,
+ )
+ end_time_offset = proto.Field(
+ proto.MESSAGE, number=8, message=duration_pb2.Duration,
+ )
+ total_count = proto.Field(proto.INT32, number=9, oneof="extraction_strategy",)
+ interval = proto.Field(
+ proto.MESSAGE,
+ number=10,
+ oneof="extraction_strategy",
+ message=duration_pb2.Duration,
+ )
+ quality = proto.Field(proto.INT32, number=11,)
+
+
+class Overlay(proto.Message):
+ r"""Overlay configuration.
+ Attributes:
+ image (google.cloud.video.transcoder_v1.types.Overlay.Image):
+ Image overlay.
+ animations (Sequence[google.cloud.video.transcoder_v1.types.Overlay.Animation]):
+ List of Animations. The list should be
+ chronological, without any time overlap.
+ """
+
+ class FadeType(proto.Enum):
+ r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``."""
+ FADE_TYPE_UNSPECIFIED = 0
+ FADE_IN = 1
+ FADE_OUT = 2
+
+ class NormalizedCoordinate(proto.Message):
+ r"""2D normalized coordinates. Default: ``{0.0, 0.0}``
+ Attributes:
+ x (float):
+ Normalized x coordinate.
+ y (float):
+ Normalized y coordinate.
+ """
+
+ x = proto.Field(proto.DOUBLE, number=1,)
+ y = proto.Field(proto.DOUBLE, number=2,)
+
+ class Image(proto.Message):
+ r"""Overlaid jpeg image.
+ Attributes:
+ uri (str):
+ Required. URI of the JPEG image in Cloud Storage. For
+ example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only
+ supported image type.
+ resolution (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate):
+ Normalized image resolution, based on output video
+ resolution. Valid values: ``0.0``–``1.0``. To respect the
+ original image aspect ratio, set either ``x`` or ``y`` to
+ ``0.0``. To use the original image resolution, set both
+ ``x`` and ``y`` to ``0.0``.
+ alpha (float):
+ Target image opacity. Valid values are from ``1.0`` (solid,
+ default) to ``0.0`` (transparent), exclusive. Set this to a
+ value greater than ``0.0``.
+ """
+
+ uri = proto.Field(proto.STRING, number=1,)
+ resolution = proto.Field(
+ proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate",
+ )
+ alpha = proto.Field(proto.DOUBLE, number=3,)
+
+ class AnimationStatic(proto.Message):
+ r"""Display static overlay object.
+ Attributes:
+ xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate):
+ Normalized coordinates based on output video resolution.
+ Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left
+ coordinate of the overlay object. For example, use the x and
+ y coordinates {0,0} to position the top-left corner of the
+ overlay animation in the top-left corner of the output
+ video.
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ The time to start displaying the overlay
+ object, in seconds. Default: 0
+ """
+
+ xy = proto.Field(
+ proto.MESSAGE, number=1, message="Overlay.NormalizedCoordinate",
+ )
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=2, message=duration_pb2.Duration,
+ )
+
+ class AnimationFade(proto.Message):
+ r"""Display overlay object with fade animation.
+ Attributes:
+ fade_type (google.cloud.video.transcoder_v1.types.Overlay.FadeType):
+ Required. Type of fade animation: ``FADE_IN`` or
+ ``FADE_OUT``.
+ xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate):
+ Normalized coordinates based on output video resolution.
+ Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left
+ coordinate of the overlay object. For example, use the x and
+ y coordinates {0,0} to position the top-left corner of the
+ overlay animation in the top-left corner of the output
+ video.
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ The time to start the fade animation, in
+ seconds. Default: 0
+ end_time_offset (google.protobuf.duration_pb2.Duration):
+ The time to end the fade animation, in seconds. Default:
+ ``start_time_offset`` + 1s
+ """
+
+ fade_type = proto.Field(proto.ENUM, number=1, enum="Overlay.FadeType",)
+ xy = proto.Field(
+ proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate",
+ )
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=3, message=duration_pb2.Duration,
+ )
+ end_time_offset = proto.Field(
+ proto.MESSAGE, number=4, message=duration_pb2.Duration,
+ )
+
+ class AnimationEnd(proto.Message):
+ r"""End previous overlay animation from the video. Without
+ AnimationEnd, the overlay object will keep the state of previous
+ animation until the end of the video.
+
+ Attributes:
+ start_time_offset (google.protobuf.duration_pb2.Duration):
+ The time to end overlay object, in seconds.
+ Default: 0
+ """
+
+ start_time_offset = proto.Field(
+ proto.MESSAGE, number=1, message=duration_pb2.Duration,
+ )
+
+ class Animation(proto.Message):
+ r"""Animation types.
+ Attributes:
+ animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic):
+ Display static overlay object.
+ animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade):
+ Display overlay object with fade animation.
+ animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd):
+ End previous animation.
+ """
+
+ animation_static = proto.Field(
+ proto.MESSAGE,
+ number=1,
+ oneof="animation_type",
+ message="Overlay.AnimationStatic",
+ )
+ animation_fade = proto.Field(
+ proto.MESSAGE,
+ number=2,
+ oneof="animation_type",
+ message="Overlay.AnimationFade",
+ )
+ animation_end = proto.Field(
+ proto.MESSAGE,
+ number=3,
+ oneof="animation_type",
+ message="Overlay.AnimationEnd",
+ )
+
+ image = proto.Field(proto.MESSAGE, number=1, message=Image,)
+ animations = proto.RepeatedField(proto.MESSAGE, number=2, message=Animation,)
+
+
+class PreprocessingConfig(proto.Message):
+ r"""Preprocessing configurations.
+ Attributes:
+ color (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Color):
+ Color preprocessing configuration.
+ denoise (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Denoise):
+ Denoise preprocessing configuration.
+ deblock (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deblock):
+ Deblock preprocessing configuration.
+ audio (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Audio):
+ Audio preprocessing configuration.
+ crop (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Crop):
+ Specify the video cropping configuration.
+ pad (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Pad):
+ Specify the video pad filter configuration.
+ """
+
+ class Color(proto.Message):
+ r"""Color preprocessing configuration.
+ Attributes:
+ saturation (float):
+ Control color saturation of the video. Enter
+ a value between -1 and 1, where -1 is fully
+ desaturated and 1 is maximum saturation. 0 is no
+ change. The default is 0.
+ contrast (float):
+ Control black and white contrast of the
+ video. Enter a value between -1 and 1, where -1
+ is minimum contrast and 1 is maximum contrast. 0
+ is no change. The default is 0.
+ brightness (float):
+ Control brightness of the video. Enter a
+ value between -1 and 1, where -1 is minimum
+ brightness and 1 is maximum brightness. 0 is no
+ change. The default is 0.
+ """
+
+ saturation = proto.Field(proto.DOUBLE, number=1,)
+ contrast = proto.Field(proto.DOUBLE, number=2,)
+ brightness = proto.Field(proto.DOUBLE, number=3,)
+
+ class Denoise(proto.Message):
+ r"""Denoise preprocessing configuration.
+ Attributes:
+ strength (float):
+ Set strength of the denoise. Enter a value
+ between 0 and 1. The higher the value, the
+ smoother the image. 0 is no denoising. The
+ default is 0.
+ tune (str):
+ Set the denoiser mode. The default is ``"standard"``.
+
+ Supported denoiser modes:
+
+ - 'standard'
+ - 'grain'
+ """
+
+ strength = proto.Field(proto.DOUBLE, number=1,)
+ tune = proto.Field(proto.STRING, number=2,)
+
+ class Deblock(proto.Message):
+ r"""Deblock preprocessing configuration.
+ Attributes:
+ strength (float):
+ Set strength of the deblocker. Enter a value
+ between 0 and 1. The higher the value, the
+ stronger the block removal. 0 is no deblocking.
+ The default is 0.
+ enabled (bool):
+ Enable deblocker. The default is ``false``.
+ """
+
+ strength = proto.Field(proto.DOUBLE, number=1,)
+ enabled = proto.Field(proto.BOOL, number=2,)
+
+ class Audio(proto.Message):
+ r"""Audio preprocessing configuration.
+ Attributes:
+ lufs (float):
+ Specify audio loudness normalization in loudness units
+ relative to full scale (LUFS). Enter a value between -24 and
+ 0 (the default), where:
+
+ - -24 is the Advanced Television Systems Committee (ATSC
+ A/85) standard
+ - -23 is the EU R128 broadcast standard
+ - -19 is the prior standard for online mono audio
+ - -18 is the ReplayGain standard
+ - -16 is the prior standard for stereo audio
+ - -14 is the new online audio standard recommended by
+ Spotify, as well as Amazon Echo
+ - 0 disables normalization
+ high_boost (bool):
+ Enable boosting high frequency components. The default is
+ ``false``.
+ low_boost (bool):
+ Enable boosting low frequency components. The default is
+ ``false``.
+ """
+
+ lufs = proto.Field(proto.DOUBLE, number=1,)
+ high_boost = proto.Field(proto.BOOL, number=2,)
+ low_boost = proto.Field(proto.BOOL, number=3,)
+
+ class Crop(proto.Message):
+ r"""Video cropping configuration for the input video. The cropped
+ input video is scaled to match the output resolution.
+
+ Attributes:
+ top_pixels (int):
+ The number of pixels to crop from the top.
+ The default is 0.
+ bottom_pixels (int):
+ The number of pixels to crop from the bottom.
+ The default is 0.
+ left_pixels (int):
+ The number of pixels to crop from the left.
+ The default is 0.
+ right_pixels (int):
+ The number of pixels to crop from the right.
+ The default is 0.
+ """
+
+ top_pixels = proto.Field(proto.INT32, number=1,)
+ bottom_pixels = proto.Field(proto.INT32, number=2,)
+ left_pixels = proto.Field(proto.INT32, number=3,)
+ right_pixels = proto.Field(proto.INT32, number=4,)
+
+ class Pad(proto.Message):
+ r"""Pad filter configuration for the input video. The padded
+ input video is scaled after padding with black to match the
+ output resolution.
+
+ Attributes:
+ top_pixels (int):
+ The number of pixels to add to the top. The
+ default is 0.
+ bottom_pixels (int):
+ The number of pixels to add to the bottom.
+ The default is 0.
+ left_pixels (int):
+ The number of pixels to add to the left. The
+ default is 0.
+ right_pixels (int):
+ The number of pixels to add to the right. The
+ default is 0.
+ """
+
+ top_pixels = proto.Field(proto.INT32, number=1,)
+ bottom_pixels = proto.Field(proto.INT32, number=2,)
+ left_pixels = proto.Field(proto.INT32, number=3,)
+ right_pixels = proto.Field(proto.INT32, number=4,)
+
+ color = proto.Field(proto.MESSAGE, number=1, message=Color,)
+ denoise = proto.Field(proto.MESSAGE, number=2, message=Denoise,)
+ deblock = proto.Field(proto.MESSAGE, number=3, message=Deblock,)
+ audio = proto.Field(proto.MESSAGE, number=4, message=Audio,)
+ crop = proto.Field(proto.MESSAGE, number=5, message=Crop,)
+ pad = proto.Field(proto.MESSAGE, number=6, message=Pad,)
+
+
+class VideoStream(proto.Message):
+ r"""Video stream resource.
+ Attributes:
+ h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings):
+ H264 codec settings.
+ h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings):
+ H265 codec settings.
+ vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings):
+ VP9 codec settings.
+ """
+
+ class H264CodecSettings(proto.Message):
+ r"""H264 codec settings.
+ Attributes:
+ width_pixels (int):
+ The width of the video in pixels. Must be an
+ even integer. When not specified, the width is
+ adjusted to match the specified height and input
+ aspect ratio. If both are omitted, the input
+ width is used.
+ height_pixels (int):
+ The height of the video in pixels. Must be an
+ even integer. When not specified, the height is
+ adjusted to match the specified width and input
+ aspect ratio. If both are omitted, the input
+ height is used.
+ frame_rate (float):
+ Required. The target video frame rate in frames per second
+ (FPS). Must be less than or equal to 120. Will default to
+ the input frame rate if larger than the input frame rate.
+ The API will generate an output FPS that is divisible by the
+ input FPS, and smaller or equal to the target FPS. See
+ `Calculating frame
+ rate <https://cloud.google.com/transcoder/docs/concepts/frame-rate>`__
+ for more information.
+ bitrate_bps (int):
+ Required. The video bitrate in bits per
+ second. Must be between 1 and 1,000,000,000.
+ pixel_format (str):
+ Pixel format to use. The default is ``"yuv420p"``.
+
+ Supported pixel formats:
+
+ - 'yuv420p' pixel format.
+ - 'yuv422p' pixel format.
+ - 'yuv444p' pixel format.
+ - 'yuv420p10' 10-bit HDR pixel format.
+ - 'yuv422p10' 10-bit HDR pixel format.
+ - 'yuv444p10' 10-bit HDR pixel format.
+ - 'yuv420p12' 12-bit HDR pixel format.
+ - 'yuv422p12' 12-bit HDR pixel format.
+ - 'yuv444p12' 12-bit HDR pixel format.
+ rate_control_mode (str):
+ Specify the ``rate_control_mode``. The default is ``"vbr"``.
+
+ Supported rate control modes:
+
+ - 'vbr' - variable bitrate
+ - 'crf' - constant rate factor
+ crf_level (int):
+ Target CRF level. Must be between 10 and 36,
+ where 10 is the highest quality and 36 is the
+ most efficient compression. The default is 21.
+ allow_open_gop (bool):
+ Specifies whether an open Group of Pictures (GOP) structure
+ should be allowed or not. The default is ``false``.
+ gop_frame_count (int):
+ Select the GOP size based on the specified
+ frame count. Must be greater than zero.
+ gop_duration (google.protobuf.duration_pb2.Duration):
+ Select the GOP size based on the specified duration. The
+ default is ``"3s"``. Note that ``gopDuration`` must be less
+ than or equal to ```segmentDuration`` <#SegmentSettings>`__,
+ and ```segmentDuration`` <#SegmentSettings>`__ must be
+ divisible by ``gopDuration``.
+ enable_two_pass (bool):
+ Use two-pass encoding strategy to achieve better video
+ quality. ``VideoStream.rate_control_mode`` must be
+ ``"vbr"``. The default is ``false``.
+ vbv_size_bits (int):
+ Size of the Video Buffering Verifier (VBV) buffer in bits.
+ Must be greater than zero. The default is equal to
+ ``VideoStream.bitrate_bps``.
+ vbv_fullness_bits (int):
+ Initial fullness of the Video Buffering Verifier (VBV)
+ buffer in bits. Must be greater than zero. The default is
+ equal to 90% of ``VideoStream.vbv_size_bits``.
+ entropy_coder (str):
+ The entropy coder to use. The default is ``"cabac"``.
+
+ Supported entropy coders:
+
+ - 'cavlc'
+ - 'cabac'
+ b_pyramid (bool):
+ Allow B-pyramid for reference frame selection. This may not
+ be supported on all decoders. The default is ``false``.
+ b_frame_count (int):
+ The number of consecutive B-frames. Must be greater than or
+ equal to zero. Must be less than
+ ``VideoStream.gop_frame_count`` if set. The default is 0.
+ aq_strength (float):
+ Specify the intensity of the adaptive
+ quantizer (AQ). Must be between 0 and 1, where 0
+ disables the quantizer and 1 maximizes the
+ quantizer. A higher value equals a lower bitrate
+ but smoother image. The default is 0.
+ profile (str):
+ Enforces the specified codec profile. The following profiles
+ are supported:
+
+ - ``baseline``
+ - ``main``
+ - ``high`` (default)
+
+ The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.264#Profile>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H264CodecSettings`` message.
+ tune (str):
+ Enforces the specified codec tune. The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.264#Tune>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H264CodecSettings`` message.
+ preset (str):
+ Enforces the specified codec preset. The default is
+ ``veryfast``. The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.264#Preset>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H264CodecSettings`` message.
+ """
+
+ width_pixels = proto.Field(proto.INT32, number=1,)
+ height_pixels = proto.Field(proto.INT32, number=2,)
+ frame_rate = proto.Field(proto.DOUBLE, number=3,)
+ bitrate_bps = proto.Field(proto.INT32, number=4,)
+ pixel_format = proto.Field(proto.STRING, number=5,)
+ rate_control_mode = proto.Field(proto.STRING, number=6,)
+ crf_level = proto.Field(proto.INT32, number=7,)
+ allow_open_gop = proto.Field(proto.BOOL, number=8,)
+ gop_frame_count = proto.Field(proto.INT32, number=9, oneof="gop_mode",)
+ gop_duration = proto.Field(
+ proto.MESSAGE, number=10, oneof="gop_mode", message=duration_pb2.Duration,
+ )
+ enable_two_pass = proto.Field(proto.BOOL, number=11,)
+ vbv_size_bits = proto.Field(proto.INT32, number=12,)
+ vbv_fullness_bits = proto.Field(proto.INT32, number=13,)
+ entropy_coder = proto.Field(proto.STRING, number=14,)
+ b_pyramid = proto.Field(proto.BOOL, number=15,)
+ b_frame_count = proto.Field(proto.INT32, number=16,)
+ aq_strength = proto.Field(proto.DOUBLE, number=17,)
+ profile = proto.Field(proto.STRING, number=18,)
+ tune = proto.Field(proto.STRING, number=19,)
+ preset = proto.Field(proto.STRING, number=20,)
+
+ class H265CodecSettings(proto.Message):
+ r"""H265 codec settings.
+ Attributes:
+ width_pixels (int):
+ The width of the video in pixels. Must be an
+ even integer. When not specified, the width is
+ adjusted to match the specified height and input
+ aspect ratio. If both are omitted, the input
+ width is used.
+ height_pixels (int):
+ The height of the video in pixels. Must be an
+ even integer. When not specified, the height is
+ adjusted to match the specified width and input
+ aspect ratio. If both are omitted, the input
+ height is used.
+ frame_rate (float):
+ Required. The target video frame rate in frames per second
+ (FPS). Must be less than or equal to 120. Will default to
+ the input frame rate if larger than the input frame rate.
+ The API will generate an output FPS that is divisible by the
+ input FPS, and smaller or equal to the target FPS. See
+ `Calculating frame
+ rate <https://cloud.google.com/transcoder/docs/concepts/frame-rate>`__
+ for more information.
+ bitrate_bps (int):
+ Required. The video bitrate in bits per
+ second. Must be between 1 and 1,000,000,000.
+ pixel_format (str):
+ Pixel format to use. The default is ``"yuv420p"``.
+
+ Supported pixel formats:
+
+ - 'yuv420p' pixel format.
+ - 'yuv422p' pixel format.
+ - 'yuv444p' pixel format.
+ - 'yuv420p10' 10-bit HDR pixel format.
+ - 'yuv422p10' 10-bit HDR pixel format.
+ - 'yuv444p10' 10-bit HDR pixel format.
+ - 'yuv420p12' 12-bit HDR pixel format.
+ - 'yuv422p12' 12-bit HDR pixel format.
+ - 'yuv444p12' 12-bit HDR pixel format.
+ rate_control_mode (str):
+ Specify the ``rate_control_mode``. The default is ``"vbr"``.
+
+ Supported rate control modes:
+
+ - 'vbr' - variable bitrate
+ - 'crf' - constant rate factor
+ crf_level (int):
+ Target CRF level. Must be between 10 and 36,
+ where 10 is the highest quality and 36 is the
+ most efficient compression. The default is 21.
+ allow_open_gop (bool):
+ Specifies whether an open Group of Pictures (GOP) structure
+ should be allowed or not. The default is ``false``.
+ gop_frame_count (int):
+ Select the GOP size based on the specified
+ frame count. Must be greater than zero.
+ gop_duration (google.protobuf.duration_pb2.Duration):
+ Select the GOP size based on the specified duration. The
+ default is ``"3s"``. Note that ``gopDuration`` must be less
+ than or equal to ```segmentDuration`` <#SegmentSettings>`__,
+ and ```segmentDuration`` <#SegmentSettings>`__ must be
+ divisible by ``gopDuration``.
+ enable_two_pass (bool):
+ Use two-pass encoding strategy to achieve better video
+ quality. ``VideoStream.rate_control_mode`` must be
+ ``"vbr"``. The default is ``false``.
+ vbv_size_bits (int):
+ Size of the Video Buffering Verifier (VBV) buffer in bits.
+ Must be greater than zero. The default is equal to
+ ``VideoStream.bitrate_bps``.
+ vbv_fullness_bits (int):
+ Initial fullness of the Video Buffering Verifier (VBV)
+ buffer in bits. Must be greater than zero. The default is
+ equal to 90% of ``VideoStream.vbv_size_bits``.
+ b_pyramid (bool):
+ Allow B-pyramid for reference frame selection. This may not
+ be supported on all decoders. The default is ``false``.
+ b_frame_count (int):
+ The number of consecutive B-frames. Must be greater than or
+ equal to zero. Must be less than
+ ``VideoStream.gop_frame_count`` if set. The default is 0.
+ aq_strength (float):
+ Specify the intensity of the adaptive
+ quantizer (AQ). Must be between 0 and 1, where 0
+ disables the quantizer and 1 maximizes the
+ quantizer. A higher value equals a lower bitrate
+ but smoother image. The default is 0.
+ profile (str):
+ Enforces the specified codec profile. The following profiles
+ are supported:
+
+ 8bit profiles
+
+ - ``main`` (default)
+ - ``main-intra``
+ - ``mainstillpicture``
+
+ 10bit profiles
+
+ - ``main10`` (default)
+ - ``main10-intra``
+ - ``main422-10``
+ - ``main422-10-intra``
+ - ``main444-10``
+ - ``main444-10-intra``
+
+ 12bit profiles
+
+ - ``main12`` (default)
+ - ``main12-intra``
+ - ``main422-12``
+ - ``main422-12-intra``
+ - ``main444-12``
+ - ``main444-12-intra``
+
+ The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.265>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H265CodecSettings`` message.
+ tune (str):
+ Enforces the specified codec tune. The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.265>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H265CodecSettings`` message.
+ preset (str):
+ Enforces the specified codec preset. The default is
+ ``veryfast``. The available options are
+ `FFmpeg-compatible <https://trac.ffmpeg.org/wiki/Encode/H.265>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``H265CodecSettings`` message.
+ """
+
+ width_pixels = proto.Field(proto.INT32, number=1,)
+ height_pixels = proto.Field(proto.INT32, number=2,)
+ frame_rate = proto.Field(proto.DOUBLE, number=3,)
+ bitrate_bps = proto.Field(proto.INT32, number=4,)
+ pixel_format = proto.Field(proto.STRING, number=5,)
+ rate_control_mode = proto.Field(proto.STRING, number=6,)
+ crf_level = proto.Field(proto.INT32, number=7,)
+ allow_open_gop = proto.Field(proto.BOOL, number=8,)
+ gop_frame_count = proto.Field(proto.INT32, number=9, oneof="gop_mode",)
+ gop_duration = proto.Field(
+ proto.MESSAGE, number=10, oneof="gop_mode", message=duration_pb2.Duration,
+ )
+ enable_two_pass = proto.Field(proto.BOOL, number=11,)
+ vbv_size_bits = proto.Field(proto.INT32, number=12,)
+ vbv_fullness_bits = proto.Field(proto.INT32, number=13,)
+ b_pyramid = proto.Field(proto.BOOL, number=14,)
+ b_frame_count = proto.Field(proto.INT32, number=15,)
+ aq_strength = proto.Field(proto.DOUBLE, number=16,)
+ profile = proto.Field(proto.STRING, number=17,)
+ tune = proto.Field(proto.STRING, number=18,)
+ preset = proto.Field(proto.STRING, number=19,)
+
+ class Vp9CodecSettings(proto.Message):
+ r"""VP9 codec settings.
+ Attributes:
+ width_pixels (int):
+ The width of the video in pixels. Must be an
+ even integer. When not specified, the width is
+ adjusted to match the specified height and input
+ aspect ratio. If both are omitted, the input
+ width is used.
+ height_pixels (int):
+ The height of the video in pixels. Must be an
+ even integer. When not specified, the height is
+ adjusted to match the specified width and input
+ aspect ratio. If both are omitted, the input
+ height is used.
+ frame_rate (float):
+ Required. The target video frame rate in frames per second
+ (FPS). Must be less than or equal to 120. Will default to
+ the input frame rate if larger than the input frame rate.
+ The API will generate an output FPS that is divisible by the
+ input FPS, and smaller or equal to the target FPS. See
+ `Calculating frame
+ rate <https://cloud.google.com/transcoder/docs/concepts/frame-rate>`__
+ for more information.
+ bitrate_bps (int):
+ Required. The video bitrate in bits per
+ second. Must be between 1 and 1,000,000,000.
+ pixel_format (str):
+ Pixel format to use. The default is ``"yuv420p"``.
+
+ Supported pixel formats:
+
+ - 'yuv420p' pixel format.
+ - 'yuv422p' pixel format.
+ - 'yuv444p' pixel format.
+ - 'yuv420p10' 10-bit HDR pixel format.
+ - 'yuv422p10' 10-bit HDR pixel format.
+ - 'yuv444p10' 10-bit HDR pixel format.
+ - 'yuv420p12' 12-bit HDR pixel format.
+ - 'yuv422p12' 12-bit HDR pixel format.
+ - 'yuv444p12' 12-bit HDR pixel format.
+ rate_control_mode (str):
+ Specify the ``rate_control_mode``. The default is ``"vbr"``.
+
+ Supported rate control modes:
+
+ - 'vbr' - variable bitrate
+ - 'crf' - constant rate factor
+ crf_level (int):
+ Target CRF level. Must be between 10 and 36,
+ where 10 is the highest quality and 36 is the
+ most efficient compression. The default is 21.
+ gop_frame_count (int):
+ Select the GOP size based on the specified
+ frame count. Must be greater than zero.
+ gop_duration (google.protobuf.duration_pb2.Duration):
+ Select the GOP size based on the specified duration. The
+ default is ``"3s"``. Note that ``gopDuration`` must be less
+ than or equal to ```segmentDuration`` <#SegmentSettings>`__,
+ and ```segmentDuration`` <#SegmentSettings>`__ must be
+ divisible by ``gopDuration``.
+ profile (str):
+ Enforces the specified codec profile. The following profiles
+ are supported:
+
+ - ``profile0`` (default)
+ - ``profile1``
+ - ``profile2``
+ - ``profile3``
+
+ The available options are
+ `WebM-compatible <https://www.webmproject.org/vp9/profiles/>`__\ {:
+ class="external" }. Note that certain values for this field
+ may cause the transcoder to override other fields you set in
+ the ``Vp9CodecSettings`` message.
+ """
+
+ width_pixels = proto.Field(proto.INT32, number=1,)
+ height_pixels = proto.Field(proto.INT32, number=2,)
+ frame_rate = proto.Field(proto.DOUBLE, number=3,)
+ bitrate_bps = proto.Field(proto.INT32, number=4,)
+ pixel_format = proto.Field(proto.STRING, number=5,)
+ rate_control_mode = proto.Field(proto.STRING, number=6,)
+ crf_level = proto.Field(proto.INT32, number=7,)
+ gop_frame_count = proto.Field(proto.INT32, number=8, oneof="gop_mode",)
+ gop_duration = proto.Field(
+ proto.MESSAGE, number=9, oneof="gop_mode", message=duration_pb2.Duration,
+ )
+ profile = proto.Field(proto.STRING, number=10,)
+
+ h264 = proto.Field(
+ proto.MESSAGE, number=1, oneof="codec_settings", message=H264CodecSettings,
+ )
+ h265 = proto.Field(
+ proto.MESSAGE, number=2, oneof="codec_settings", message=H265CodecSettings,
+ )
+ vp9 = proto.Field(
+ proto.MESSAGE, number=3, oneof="codec_settings", message=Vp9CodecSettings,
+ )
+
+
+class AudioStream(proto.Message):
+ r"""Audio stream resource.
+ Attributes:
+ codec (str):
+ The codec for this audio stream. The default is ``"aac"``.
+
+ Supported audio codecs:
+
+ - 'aac'
+ - 'aac-he'
+ - 'aac-he-v2'
+ - 'mp3'
+ - 'ac3'
+ - 'eac3'
+ bitrate_bps (int):
+ Required. Audio bitrate in bits per second.
+ Must be between 1 and 10,000,000.
+ channel_count (int):
+ Number of audio channels. Must be between 1
+ and 6. The default is 2.
+ channel_layout (Sequence[str]):
+ A list of channel names specifying layout of the audio
+ channels. This only affects the metadata embedded in the
+ container headers, if supported by the specified format. The
+ default is ``["fl", "fr"]``.
+
+ Supported channel names:
+
+ - 'fl' - Front left channel
+ - 'fr' - Front right channel
+ - 'sl' - Side left channel
+ - 'sr' - Side right channel
+ - 'fc' - Front center channel
+ - 'lfe' - Low frequency
+ mapping (Sequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]):
+ The mapping for the ``Job.edit_list`` atoms with audio
+ ``EditAtom.inputs``.
+ sample_rate_hertz (int):
+ The audio sample rate in Hertz. The default
+ is 48000 Hertz.
+ """
+
+ class AudioMapping(proto.Message):
+ r"""The mapping for the ``Job.edit_list`` atoms with audio
+ ``EditAtom.inputs``.
+
+ Attributes:
+ atom_key (str):
+ Required. The ``EditAtom.key`` that references the atom with
+ audio inputs in the ``Job.edit_list``.
+ input_key (str):
+ Required. The ``Input.key`` that identifies the input file.
+ input_track (int):
+ Required. The zero-based index of the track
+ in the input file.
+ input_channel (int):
+ Required. The zero-based index of the channel
+ in the input audio stream.
+ output_channel (int):
+ Required. The zero-based index of the channel
+ in the output audio stream.
+ gain_db (float):
+ Audio volume control in dB. Negative values
+ decrease volume, positive values increase. The
+ default is 0.
+ """
+
+ atom_key = proto.Field(proto.STRING, number=1,)
+ input_key = proto.Field(proto.STRING, number=2,)
+ input_track = proto.Field(proto.INT32, number=3,)
+ input_channel = proto.Field(proto.INT32, number=4,)
+ output_channel = proto.Field(proto.INT32, number=5,)
+ gain_db = proto.Field(proto.DOUBLE, number=6,)
+
+ codec = proto.Field(proto.STRING, number=1,)
+ bitrate_bps = proto.Field(proto.INT32, number=2,)
+ channel_count = proto.Field(proto.INT32, number=3,)
+ channel_layout = proto.RepeatedField(proto.STRING, number=4,)
+ mapping = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioMapping,)
+ sample_rate_hertz = proto.Field(proto.INT32, number=6,)
+
+
+class TextStream(proto.Message):
+ r"""Encoding of a text stream. For example, closed captions or
+ subtitles.
+
+ Attributes:
+ codec (str):
+ The codec for this text stream. The default is ``"webvtt"``.
+
+ Supported text codecs:
+
+ - 'srt'
+ - 'ttml'
+ - 'cea608'
+ - 'cea708'
+ - 'webvtt'
+ mapping (Sequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]):
+ The mapping for the ``Job.edit_list`` atoms with text
+ ``EditAtom.inputs``.
+ """
+
+ class TextMapping(proto.Message):
+ r"""The mapping for the ``Job.edit_list`` atoms with text
+ ``EditAtom.inputs``.
+
+ Attributes:
+ atom_key (str):
+            Required. The ``EditAtom.key`` that references the atom with
+ text inputs in the ``Job.edit_list``.
+ input_key (str):
+ Required. The ``Input.key`` that identifies the input file.
+ input_track (int):
+ Required. The zero-based index of the track
+ in the input file.
+ """
+
+ atom_key = proto.Field(proto.STRING, number=1,)
+ input_key = proto.Field(proto.STRING, number=2,)
+ input_track = proto.Field(proto.INT32, number=3,)
+
+ codec = proto.Field(proto.STRING, number=1,)
+ mapping = proto.RepeatedField(proto.MESSAGE, number=3, message=TextMapping,)
+
+
+class SegmentSettings(proto.Message):
+ r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``.
+ Attributes:
+ segment_duration (google.protobuf.duration_pb2.Duration):
+ Duration of the segments in seconds. The default is
+ ``"6.0s"``. Note that ``segmentDuration`` must be greater
+ than or equal to ```gopDuration`` <#videostream>`__, and
+ ``segmentDuration`` must be divisible by
+ ```gopDuration`` <#videostream>`__.
+ individual_segments (bool):
+ Required. Create an individual segment file. The default is
+ ``false``.
+ """
+
+ segment_duration = proto.Field(
+ proto.MESSAGE, number=1, message=duration_pb2.Duration,
+ )
+ individual_segments = proto.Field(proto.BOOL, number=3,)
+
+
+class Encryption(proto.Message):
+ r"""Encryption settings.
+ Attributes:
+ key (str):
+ Required. 128 bit encryption key represented
+ as lowercase hexadecimal digits.
+ iv (str):
+ Required. 128 bit Initialization Vector (IV)
+ represented as lowercase hexadecimal digits.
+ aes_128 (google.cloud.video.transcoder_v1.types.Encryption.Aes128Encryption):
+ Configuration for AES-128 encryption.
+ sample_aes (google.cloud.video.transcoder_v1.types.Encryption.SampleAesEncryption):
+ Configuration for SAMPLE-AES encryption.
+ mpeg_cenc (google.cloud.video.transcoder_v1.types.Encryption.MpegCommonEncryption):
+ Configuration for MPEG Common Encryption
+ (MPEG-CENC).
+ """
+
+ class Aes128Encryption(proto.Message):
+ r"""Configuration for AES-128 encryption.
+ Attributes:
+ key_uri (str):
+ Required. URI of the key delivery service.
+ This URI is inserted into the M3U8 header.
+ """
+
+ key_uri = proto.Field(proto.STRING, number=1,)
+
+ class SampleAesEncryption(proto.Message):
+ r"""Configuration for SAMPLE-AES encryption.
+ Attributes:
+ key_uri (str):
+ Required. URI of the key delivery service.
+ This URI is inserted into the M3U8 header.
+ """
+
+ key_uri = proto.Field(proto.STRING, number=1,)
+
+ class MpegCommonEncryption(proto.Message):
+ r"""Configuration for MPEG Common Encryption (MPEG-CENC).
+ Attributes:
+ key_id (str):
+ Required. 128 bit Key ID represented as
+ lowercase hexadecimal digits for use with common
+ encryption.
+ scheme (str):
+ Required. Specify the encryption scheme.
+ Supported encryption schemes:
+ - 'cenc'
+ - 'cbcs'
+ """
+
+ key_id = proto.Field(proto.STRING, number=1,)
+ scheme = proto.Field(proto.STRING, number=2,)
+
+ key = proto.Field(proto.STRING, number=1,)
+ iv = proto.Field(proto.STRING, number=2,)
+ aes_128 = proto.Field(
+ proto.MESSAGE, number=3, oneof="encryption_mode", message=Aes128Encryption,
+ )
+ sample_aes = proto.Field(
+ proto.MESSAGE, number=4, oneof="encryption_mode", message=SampleAesEncryption,
+ )
+ mpeg_cenc = proto.Field(
+ proto.MESSAGE, number=5, oneof="encryption_mode", message=MpegCommonEncryption,
+ )
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/google/cloud/video/transcoder_v1/types/services.py b/google/cloud/video/transcoder_v1/types/services.py
new file mode 100644
index 0000000..edee4f1
--- /dev/null
+++ b/google/cloud/video/transcoder_v1/types/services.py
@@ -0,0 +1,221 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import proto # type: ignore
+
+from google.cloud.video.transcoder_v1.types import resources
+
+
+__protobuf__ = proto.module(
+ package="google.cloud.video.transcoder.v1",
+ manifest={
+ "CreateJobRequest",
+ "ListJobsRequest",
+ "GetJobRequest",
+ "DeleteJobRequest",
+ "ListJobsResponse",
+ "CreateJobTemplateRequest",
+ "ListJobTemplatesRequest",
+ "GetJobTemplateRequest",
+ "DeleteJobTemplateRequest",
+ "ListJobTemplatesResponse",
+ },
+)
+
+
+class CreateJobRequest(proto.Message):
+ r"""Request message for ``TranscoderService.CreateJob``.
+ Attributes:
+ parent (str):
+ Required. The parent location to create and process this
+ job. Format: ``projects/{project}/locations/{location}``
+ job (google.cloud.video.transcoder_v1.types.Job):
+            Required. Parameters for creating a
+            transcoding job.
+ """
+
+ parent = proto.Field(proto.STRING, number=1,)
+ job = proto.Field(proto.MESSAGE, number=2, message=resources.Job,)
+
+
+class ListJobsRequest(proto.Message):
+ r"""Request message for ``TranscoderService.ListJobs``. The parent
+ location from which to retrieve the collection of jobs.
+
+ Attributes:
+ parent (str):
+ Required. Format:
+ ``projects/{project}/locations/{location}``
+ page_size (int):
+ The maximum number of items to return.
+ page_token (str):
+ The ``next_page_token`` value returned from a previous List
+ request, if any.
+ filter (str):
+ The filter expression, following the syntax
+ outlined in https://google.aip.dev/160.
+ order_by (str):
+ One or more fields to compare and use to sort
+ the output. See
+ https://google.aip.dev/132#ordering.
+ """
+
+ parent = proto.Field(proto.STRING, number=1,)
+ page_size = proto.Field(proto.INT32, number=2,)
+ page_token = proto.Field(proto.STRING, number=3,)
+ filter = proto.Field(proto.STRING, number=4,)
+ order_by = proto.Field(proto.STRING, number=5,)
+
+
+class GetJobRequest(proto.Message):
+ r"""Request message for ``TranscoderService.GetJob``.
+ Attributes:
+ name (str):
+ Required. The name of the job to retrieve. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+ """
+
+ name = proto.Field(proto.STRING, number=1,)
+
+
+class DeleteJobRequest(proto.Message):
+ r"""Request message for ``TranscoderService.DeleteJob``.
+ Attributes:
+ name (str):
+ Required. The name of the job to delete. Format:
+ ``projects/{project}/locations/{location}/jobs/{job}``
+ """
+
+ name = proto.Field(proto.STRING, number=1,)
+
+
+class ListJobsResponse(proto.Message):
+ r"""Response message for ``TranscoderService.ListJobs``.
+ Attributes:
+ jobs (Sequence[google.cloud.video.transcoder_v1.types.Job]):
+ List of jobs in the specified region.
+ next_page_token (str):
+ The pagination token.
+ unreachable (Sequence[str]):
+ List of regions that could not be reached.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ jobs = proto.RepeatedField(proto.MESSAGE, number=1, message=resources.Job,)
+ next_page_token = proto.Field(proto.STRING, number=2,)
+ unreachable = proto.RepeatedField(proto.STRING, number=3,)
+
+
+class CreateJobTemplateRequest(proto.Message):
+ r"""Request message for ``TranscoderService.CreateJobTemplate``.
+ Attributes:
+ parent (str):
+ Required. The parent location to create this job template.
+ Format: ``projects/{project}/locations/{location}``
+ job_template (google.cloud.video.transcoder_v1.types.JobTemplate):
+            Required. Parameters for creating a job
+            template.
+ job_template_id (str):
+ Required. The ID to use for the job template, which will
+ become the final component of the job template's resource
+ name.
+
+ This value should be 4-63 characters, and valid characters
+ must match the regular expression
+ ``[a-zA-Z][a-zA-Z0-9_-]*``.
+ """
+
+ parent = proto.Field(proto.STRING, number=1,)
+ job_template = proto.Field(proto.MESSAGE, number=2, message=resources.JobTemplate,)
+ job_template_id = proto.Field(proto.STRING, number=3,)
+
+
+class ListJobTemplatesRequest(proto.Message):
+ r"""Request message for ``TranscoderService.ListJobTemplates``.
+ Attributes:
+ parent (str):
+ Required. The parent location from which to retrieve the
+ collection of job templates. Format:
+ ``projects/{project}/locations/{location}``
+ page_size (int):
+ The maximum number of items to return.
+ page_token (str):
+ The ``next_page_token`` value returned from a previous List
+ request, if any.
+ filter (str):
+ The filter expression, following the syntax
+ outlined in https://google.aip.dev/160.
+ order_by (str):
+ One or more fields to compare and use to sort
+ the output. See
+ https://google.aip.dev/132#ordering.
+ """
+
+ parent = proto.Field(proto.STRING, number=1,)
+ page_size = proto.Field(proto.INT32, number=2,)
+ page_token = proto.Field(proto.STRING, number=3,)
+ filter = proto.Field(proto.STRING, number=4,)
+ order_by = proto.Field(proto.STRING, number=5,)
+
+
+class GetJobTemplateRequest(proto.Message):
+ r"""Request message for ``TranscoderService.GetJobTemplate``.
+ Attributes:
+ name (str):
+ Required. The name of the job template to retrieve. Format:
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+ """
+
+ name = proto.Field(proto.STRING, number=1,)
+
+
+class DeleteJobTemplateRequest(proto.Message):
+ r"""Request message for ``TranscoderService.DeleteJobTemplate``.
+ Attributes:
+ name (str):
+            Required. The name of the job template to delete. Format:
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}``
+ """
+
+ name = proto.Field(proto.STRING, number=1,)
+
+
+class ListJobTemplatesResponse(proto.Message):
+ r"""Response message for ``TranscoderService.ListJobTemplates``.
+ Attributes:
+ job_templates (Sequence[google.cloud.video.transcoder_v1.types.JobTemplate]):
+ List of job templates in the specified
+ region.
+ next_page_token (str):
+ The pagination token.
+ unreachable (Sequence[str]):
+ List of regions that could not be reached.
+ """
+
+ @property
+ def raw_page(self):
+ return self
+
+ job_templates = proto.RepeatedField(
+ proto.MESSAGE, number=1, message=resources.JobTemplate,
+ )
+ next_page_token = proto.Field(proto.STRING, number=2,)
+ unreachable = proto.RepeatedField(proto.STRING, number=3,)
+
+
+__all__ = tuple(sorted(__protobuf__.manifest))
diff --git a/owlbot.py b/owlbot.py
index a74a262..770b5ec 100644
--- a/owlbot.py
+++ b/owlbot.py
@@ -21,7 +21,7 @@
common = gcp.CommonTemplates()
-default_version = "v1beta1"
+default_version = "v1"
for library in s.get_staging_dirs(default_version):
# Work around generator issue https://github.com/googleapis/gapic-generator-python/issues/902
diff --git a/tests/unit/gapic/transcoder_v1/__init__.py b/tests/unit/gapic/transcoder_v1/__init__.py
new file mode 100644
index 0000000..4de6597
--- /dev/null
+++ b/tests/unit/gapic/transcoder_v1/__init__.py
@@ -0,0 +1,15 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py
new file mode 100644
index 0000000..7e2b3b5
--- /dev/null
+++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py
@@ -0,0 +1,3097 @@
+# -*- coding: utf-8 -*-
+# Copyright 2020 Google LLC
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+import os
+import mock
+import packaging.version
+
+import grpc
+from grpc.experimental import aio
+import math
+import pytest
+from proto.marshal.rules.dates import DurationRule, TimestampRule
+
+
+from google.api_core import client_options
+from google.api_core import exceptions as core_exceptions
+from google.api_core import gapic_v1
+from google.api_core import grpc_helpers
+from google.api_core import grpc_helpers_async
+from google.auth import credentials as ga_credentials
+from google.auth.exceptions import MutualTLSChannelError
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+ TranscoderServiceAsyncClient,
+)
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+ TranscoderServiceClient,
+)
+from google.cloud.video.transcoder_v1.services.transcoder_service import pagers
+from google.cloud.video.transcoder_v1.services.transcoder_service import transports
+from google.cloud.video.transcoder_v1.services.transcoder_service.transports.base import (
+ _GOOGLE_AUTH_VERSION,
+)
+from google.cloud.video.transcoder_v1.types import resources
+from google.cloud.video.transcoder_v1.types import services
+from google.oauth2 import service_account
+from google.protobuf import any_pb2 # type: ignore
+from google.protobuf import duration_pb2 # type: ignore
+from google.protobuf import timestamp_pb2 # type: ignore
+from google.rpc import status_pb2 # type: ignore
+import google.auth
+
+
+# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
+# through google-api-core:
+# - Delete the auth "less than" test cases
+# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
+requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth < 1.25.0",
+)
+requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
+ packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
+ reason="This test requires google-auth >= 1.25.0",
+)
+
+
+def client_cert_source_callback():
+ return b"cert bytes", b"key bytes"
+
+
+# If default endpoint is localhost, then default mtls endpoint will be the same.
+# This method modifies the default endpoint so the client can produce a different
+# mtls endpoint for endpoint testing purposes.
+def modify_default_endpoint(client):
+ return (
+ "foo.googleapis.com"
+ if ("localhost" in client.DEFAULT_ENDPOINT)
+ else client.DEFAULT_ENDPOINT
+ )
+
+
+def test__get_default_mtls_endpoint():
+ api_endpoint = "example.googleapis.com"
+ api_mtls_endpoint = "example.mtls.googleapis.com"
+ sandbox_endpoint = "example.sandbox.googleapis.com"
+ sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com"
+ non_googleapi = "api.example.com"
+
+ assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None
+ assert (
+ TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint)
+ == api_mtls_endpoint
+ )
+ assert (
+ TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint)
+ == sandbox_mtls_endpoint
+ )
+ assert (
+ TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi)
+ == non_googleapi
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,]
+)
+def test_transcoder_service_client_from_service_account_info(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_info"
+ ) as factory:
+ factory.return_value = creds
+ info = {"valid": True}
+ client = client_class.from_service_account_info(info)
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "transcoder.googleapis.com:443"
+
+
+@pytest.mark.parametrize(
+ "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,]
+)
+def test_transcoder_service_client_service_account_always_use_jwt(client_class):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ client = client_class(credentials=creds)
+ use_jwt.assert_not_called()
+
+
+@pytest.mark.parametrize(
+ "transport_class,transport_name",
+ [
+ (transports.TranscoderServiceGrpcTransport, "grpc"),
+ (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"),
+ ],
+)
+def test_transcoder_service_client_service_account_always_use_jwt_true(
+ transport_class, transport_name
+):
+ with mock.patch.object(
+ service_account.Credentials, "with_always_use_jwt_access", create=True
+ ) as use_jwt:
+ creds = service_account.Credentials(None, None, None)
+ transport = transport_class(credentials=creds, always_use_jwt_access=True)
+ use_jwt.assert_called_once_with(True)
+
+
+@pytest.mark.parametrize(
+ "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,]
+)
+def test_transcoder_service_client_from_service_account_file(client_class):
+ creds = ga_credentials.AnonymousCredentials()
+ with mock.patch.object(
+ service_account.Credentials, "from_service_account_file"
+ ) as factory:
+ factory.return_value = creds
+ client = client_class.from_service_account_file("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ client = client_class.from_service_account_json("dummy/file/path.json")
+ assert client.transport._credentials == creds
+ assert isinstance(client, client_class)
+
+ assert client.transport._host == "transcoder.googleapis.com:443"
+
+
+def test_transcoder_service_client_get_transport_class():
+ transport = TranscoderServiceClient.get_transport_class()
+ available_transports = [
+ transports.TranscoderServiceGrpcTransport,
+ ]
+ assert transport in available_transports
+
+ transport = TranscoderServiceClient.get_transport_class("grpc")
+ assert transport == transports.TranscoderServiceGrpcTransport
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+@mock.patch.object(
+ TranscoderServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceClient),
+)
+@mock.patch.object(
+ TranscoderServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceAsyncClient),
+)
+def test_transcoder_service_client_client_options(
+ client_class, transport_class, transport_name
+):
+ # Check that if channel is provided we won't create a new one.
+ with mock.patch.object(TranscoderServiceClient, "get_transport_class") as gtc:
+ transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
+ client = client_class(transport=transport)
+ gtc.assert_not_called()
+
+ # Check that if channel is provided via str we will create a new one.
+ with mock.patch.object(TranscoderServiceClient, "get_transport_class") as gtc:
+ client = client_class(transport=transport_name)
+ gtc.assert_called()
+
+ # Check the case api_endpoint is provided.
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "never".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
+ # "always".
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_MTLS_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ TranscoderServiceClient,
+ transports.TranscoderServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ TranscoderServiceClient,
+ transports.TranscoderServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ TranscoderServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceClient),
+)
+@mock.patch.object(
+ TranscoderServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_transcoder_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ options = client_options.ClientOptions(
+ client_cert_source=client_cert_source_callback
+ )
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_client_cert_source = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_client_cert_source = client_cert_source_callback
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=True,
+ ):
+ with mock.patch(
+ "google.auth.transport.mtls.default_client_cert_source",
+ return_value=client_cert_source_callback,
+ ):
+ if use_client_cert_env == "false":
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_client_cert_source = None
+ else:
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_client_cert_source = client_cert_source_callback
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ client_cert_source_for_mtls=expected_client_cert_source,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.mtls.has_default_client_cert_source",
+ return_value=False,
+ ):
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_transcoder_service_client_client_options_scopes(
+ client_class, transport_class, transport_name
+):
+ # Check the case scopes are provided.
+ options = client_options.ClientOptions(scopes=["1", "2"],)
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=["1", "2"],
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name",
+ [
+ (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ ),
+ ],
+)
+def test_transcoder_service_client_client_options_credentials_file(
+ client_class, transport_class, transport_name
+):
+ # Check the case credentials file is provided.
+ options = client_options.ClientOptions(credentials_file="credentials.json")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file="credentials.json",
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_transcoder_service_client_client_options_from_dict():
+ with mock.patch(
+ "google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__"
+ ) as grpc_transport:
+ grpc_transport.return_value = None
+ client = TranscoderServiceClient(
+ client_options={"api_endpoint": "squid.clam.whelk"}
+ )
+ grpc_transport.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host="squid.clam.whelk",
+ scopes=None,
+ client_cert_source_for_mtls=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequest):
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job(
+ name="name_value",
+ input_uri="input_uri_value",
+ output_uri="output_uri_value",
+ state=resources.Job.ProcessingState.PENDING,
+ ttl_after_completion_days=2670,
+ template_id="template_id_value",
+ )
+ response = client.create_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.CreateJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Job)
+ assert response.name == "name_value"
+ assert response.input_uri == "input_uri_value"
+ assert response.output_uri == "output_uri_value"
+ assert response.state == resources.Job.ProcessingState.PENDING
+ assert response.ttl_after_completion_days == 2670
+
+
+def test_create_job_from_dict():
+ test_create_job(request_type=dict)
+
+
+def test_create_job_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ client.create_job()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.CreateJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_create_job_async(
+ transport: str = "grpc_asyncio", request_type=services.CreateJobRequest
+):
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ resources.Job(
+ name="name_value",
+ input_uri="input_uri_value",
+ output_uri="output_uri_value",
+ state=resources.Job.ProcessingState.PENDING,
+ ttl_after_completion_days=2670,
+ )
+ )
+ response = await client.create_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.CreateJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Job)
+ assert response.name == "name_value"
+ assert response.input_uri == "input_uri_value"
+ assert response.output_uri == "output_uri_value"
+ assert response.state == resources.Job.ProcessingState.PENDING
+ assert response.ttl_after_completion_days == 2670
+
+
+@pytest.mark.asyncio
+async def test_create_job_async_from_dict():
+ await test_create_job_async(request_type=dict)
+
+
+def test_create_job_field_headers():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.CreateJobRequest()
+
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ call.return_value = resources.Job()
+ client.create_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_create_job_field_headers_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.CreateJobRequest()
+
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
+ await client.create_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_create_job_flattened():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.create_job(
+ parent="parent_value", job=resources.Job(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == "parent_value"
+ assert args[0].job == resources.Job(name="name_value")
+
+
+def test_create_job_flattened_error():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.create_job(
+ services.CreateJobRequest(),
+ parent="parent_value",
+ job=resources.Job(name="name_value"),
+ )
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.create_job(
+ parent="parent_value", job=resources.Job(name="name_value"),
+ )
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == "parent_value"
+ assert args[0].job == resources.Job(name="name_value")
+
+
+@pytest.mark.asyncio
+async def test_create_job_flattened_error_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.create_job(
+ services.CreateJobRequest(),
+ parent="parent_value",
+ job=resources.Job(name="name_value"),
+ )
+
+
+def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsRequest):
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = services.ListJobsResponse(
+ next_page_token="next_page_token_value", unreachable=["unreachable_value"],
+ )
+ response = client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.ListJobsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListJobsPager)
+ assert response.next_page_token == "next_page_token_value"
+ assert response.unreachable == ["unreachable_value"]
+
+
+def test_list_jobs_from_dict():
+ test_list_jobs(request_type=dict)
+
+
+def test_list_jobs_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ client.list_jobs()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.ListJobsRequest()
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async(
+ transport: str = "grpc_asyncio", request_type=services.ListJobsRequest
+):
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ services.ListJobsResponse(
+ next_page_token="next_page_token_value",
+ unreachable=["unreachable_value"],
+ )
+ )
+ response = await client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.ListJobsRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, pagers.ListJobsAsyncPager)
+ assert response.next_page_token == "next_page_token_value"
+ assert response.unreachable == ["unreachable_value"]
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+ await test_list_jobs_async(request_type=dict)
+
+
+def test_list_jobs_field_headers():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.ListJobsRequest()
+
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ call.return_value = services.ListJobsResponse()
+ client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_field_headers_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.ListJobsRequest()
+
+ request.parent = "parent/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ services.ListJobsResponse()
+ )
+ await client.list_jobs(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"]
+
+
+def test_list_jobs_flattened():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = services.ListJobsResponse()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.list_jobs(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == "parent_value"
+
+
+def test_list_jobs_flattened_error():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.list_jobs(
+ services.ListJobsRequest(), parent="parent_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = services.ListJobsResponse()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ services.ListJobsResponse()
+ )
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.list_jobs(parent="parent_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].parent == "parent_value"
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_flattened_error_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.list_jobs(
+ services.ListJobsRequest(), parent="parent_value",
+ )
+
+
+def test_list_jobs_pager():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ services.ListJobsResponse(
+ jobs=[resources.Job(), resources.Job(), resources.Job(),],
+ next_page_token="abc",
+ ),
+ services.ListJobsResponse(jobs=[], next_page_token="def",),
+ services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",),
+ services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],),
+ RuntimeError,
+ )
+
+ metadata = ()
+ metadata = tuple(metadata) + (
+ gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
+ )
+ pager = client.list_jobs(request={})
+
+ assert pager._metadata == metadata
+
+ results = [i for i in pager]
+ assert len(results) == 6
+ assert all(isinstance(i, resources.Job) for i in results)
+
+
+def test_list_jobs_pages():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ services.ListJobsResponse(
+ jobs=[resources.Job(), resources.Job(), resources.Job(),],
+ next_page_token="abc",
+ ),
+ services.ListJobsResponse(jobs=[], next_page_token="def",),
+ services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",),
+ services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],),
+ RuntimeError,
+ )
+ pages = list(client.list_jobs(request={}).pages)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pager():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ services.ListJobsResponse(
+ jobs=[resources.Job(), resources.Job(), resources.Job(),],
+ next_page_token="abc",
+ ),
+ services.ListJobsResponse(jobs=[], next_page_token="def",),
+ services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",),
+ services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],),
+ RuntimeError,
+ )
+ async_pager = await client.list_jobs(request={},)
+ assert async_pager.next_page_token == "abc"
+ responses = []
+ async for response in async_pager:
+ responses.append(response)
+
+ assert len(responses) == 6
+ assert all(isinstance(i, resources.Job) for i in responses)
+
+
+@pytest.mark.asyncio
+async def test_list_jobs_async_pages():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials,
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
+ ) as call:
+ # Set the response to a series of pages.
+ call.side_effect = (
+ services.ListJobsResponse(
+ jobs=[resources.Job(), resources.Job(), resources.Job(),],
+ next_page_token="abc",
+ ),
+ services.ListJobsResponse(jobs=[], next_page_token="def",),
+ services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",),
+ services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],),
+ RuntimeError,
+ )
+ pages = []
+ async for page_ in (await client.list_jobs(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
+
+
+def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job(
+ name="name_value",
+ input_uri="input_uri_value",
+ output_uri="output_uri_value",
+ state=resources.Job.ProcessingState.PENDING,
+ ttl_after_completion_days=2670,
+ template_id="template_id_value",
+ )
+ response = client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.GetJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Job)
+ assert response.name == "name_value"
+ assert response.input_uri == "input_uri_value"
+ assert response.output_uri == "output_uri_value"
+ assert response.state == resources.Job.ProcessingState.PENDING
+ assert response.ttl_after_completion_days == 2670
+
+
+def test_get_job_from_dict():
+ test_get_job(request_type=dict)
+
+
+def test_get_job_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ client.get_job()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.GetJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_get_job_async(
+ transport: str = "grpc_asyncio", request_type=services.GetJobRequest
+):
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
+ resources.Job(
+ name="name_value",
+ input_uri="input_uri_value",
+ output_uri="output_uri_value",
+ state=resources.Job.ProcessingState.PENDING,
+ ttl_after_completion_days=2670,
+ )
+ )
+ response = await client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.GetJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert isinstance(response, resources.Job)
+ assert response.name == "name_value"
+ assert response.input_uri == "input_uri_value"
+ assert response.output_uri == "output_uri_value"
+ assert response.state == resources.Job.ProcessingState.PENDING
+ assert response.ttl_after_completion_days == 2670
+
+
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+ await test_get_job_async(request_type=dict)
+
+
+def test_get_job_field_headers():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.GetJobRequest()
+
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ call.return_value = resources.Job()
+ client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_get_job_field_headers_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.GetJobRequest()
+
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
+ await client.get_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_get_job_flattened():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job()
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.get_job(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == "name_value"
+
+
+def test_get_job_flattened_error():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.get_job(
+ services.GetJobRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = resources.Job()
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.get_job(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_get_job_flattened_error_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.get_job(
+ services.GetJobRequest(), name="name_value",
+ )
+
+
+def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequest):
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ response = client.delete_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.DeleteJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+def test_delete_job_from_dict():
+ test_delete_job(request_type=dict)
+
+
+def test_delete_job_empty_call():
+ # This test is a coverage failsafe to make sure that totally empty calls,
+ # i.e. request == None and no flattened fields passed, work.
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ client.delete_job()
+ call.assert_called()
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.DeleteJobRequest()
+
+
+@pytest.mark.asyncio
+async def test_delete_job_async(
+ transport: str = "grpc_asyncio", request_type=services.DeleteJobRequest
+):
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # Everything is optional in proto3 as far as the runtime is concerned,
+ # and we are mocking out the actual API, so just send an empty request.
+ request = request_type()
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ response = await client.delete_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == services.DeleteJobRequest()
+
+ # Establish that the response is the type that we expect.
+ assert response is None
+
+
+@pytest.mark.asyncio
+async def test_delete_job_async_from_dict():
+ await test_delete_job_async(request_type=dict)
+
+
+def test_delete_job_field_headers():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.DeleteJobRequest()
+
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ call.return_value = None
+ client.delete_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+@pytest.mark.asyncio
+async def test_delete_job_field_headers_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Any value that is part of the HTTP/1.1 URI should be sent as
+ # a field header. Set these to a non-empty value.
+ request = services.DeleteJobRequest()
+
+ request.name = "name/value"
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ await client.delete_job(request)
+
+ # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0] == request
+
+ # Establish that the field header was sent.
+ _, _, kw = call.mock_calls[0]
+ assert ("x-goog-request-params", "name=name/value",) in kw["metadata"]
+
+
+def test_delete_job_flattened():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ client.delete_job(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls) == 1
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == "name_value"
+
+
+def test_delete_job_flattened_error():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_job(
+ services.DeleteJobRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_job(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_job_flattened_error_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_job(
+ services.DeleteJobRequest(), name="name_value",
+ )
+
+
def test_create_job_template(
    transport: str = "grpc", request_type=services.CreateJobTemplateRequest
):
    """create_job_template forwards the request and surfaces the stub's reply."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        stub.return_value = resources.JobTemplate(name="name_value",)
        response = client.create_job_template(request)

    # Exactly one stub invocation, carrying the default request proto.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.CreateJobTemplateRequest()

    # The canned reply comes back typed and populated as expected.
    assert isinstance(response, resources.JobTemplate)
    assert response.name == "name_value"
+
+
def test_create_job_template_from_dict():
    """Re-run the request-object test with a dict request to cover coercion."""
    test_create_job_template(request_type=dict)
+
+
def test_create_job_template_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Patch the transport-level callable, then invoke with no arguments.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        client.create_job_template()
        stub.assert_called()
        _name, args, _kwargs = stub.mock_calls[0]
        # An omitted request materializes as the default request proto.
        assert args[0] == services.CreateJobTemplateRequest()
+
+
@pytest.mark.asyncio
async def test_create_job_template_async(
    transport: str = "grpc_asyncio", request_type=services.CreateJobTemplateRequest
):
    """Async variant: the awaited call resolves to the stub's canned reply."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked.
    request = request_type()

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate(name="name_value",)
        )
        response = await client.create_job_template(request)

    # The stub saw exactly the default request proto.
    assert stub.mock_calls
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.CreateJobTemplateRequest()

    # The canned reply comes back typed and populated as expected.
    assert isinstance(response, resources.JobTemplate)
    assert response.name == "name_value"
+
+
@pytest.mark.asyncio
async def test_create_job_template_async_from_dict():
    """Re-run the async request-object test with a dict request to cover coercion."""
    await test_create_job_template_async(request_type=dict)
+
+
def test_create_job_template_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.CreateJobTemplateRequest()
    request.parent = "parent/value"

    # Patch the transport-level callable and send the request through.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        stub.return_value = resources.JobTemplate()
        client.create_job_template(request)

    # Exactly one stub call, carrying the very request we built.
    assert stub.call_count == 1
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "parent=parent/value",)
    assert expected_header in kwargs["metadata"]
+
+
@pytest.mark.asyncio
async def test_create_job_template_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.CreateJobTemplateRequest()
    request.parent = "parent/value"

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        await client.create_job_template(request)

    # The stub was invoked with the very request we built.
    assert stub.mock_calls
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "parent=parent/value",)
    assert expected_header in kwargs["metadata"]
+
+
def test_create_job_template_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as stub:
        stub.return_value = resources.JobTemplate()
        # Invoke with a truthy value for every flattened field.
        client.create_job_template(
            parent="parent_value",
            job_template=resources.JobTemplate(name="name_value"),
            job_template_id="job_template_id_value",
        )

    # Each keyword landed on the corresponding request field.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0].parent == "parent_value"
    assert args[0].job_template == resources.JobTemplate(name="name_value")
    assert args[0].job_template_id == "job_template_id_value"
+
+
def test_create_job_template_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.CreateJobTemplateRequest()
    with pytest.raises(ValueError):
        client.create_job_template(
            request,
            parent="parent_value",
            job_template=resources.JobTemplate(name="name_value"),
            job_template_id="job_template_id_value",
        )
+
+
@pytest.mark.asyncio
async def test_create_job_template_flattened_async():
    """Verify the async client folds flattened kwargs into the CreateJobTemplate request."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.create_job_template), "__call__"
    ) as call:
        # Designate the awaitable return value for the call.  (The generated
        # plain `call.return_value = resources.JobTemplate()` line was dead
        # code — immediately overwritten by this fake — so it was removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.create_job_template(
            parent="parent_value",
            job_template=resources.JobTemplate(name="name_value"),
            job_template_id="job_template_id_value",
        )

    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"
    assert args[0].job_template == resources.JobTemplate(name="name_value")
    assert args[0].job_template_id == "job_template_id_value"
+
+
@pytest.mark.asyncio
async def test_create_job_template_flattened_error_async():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.CreateJobTemplateRequest()
    with pytest.raises(ValueError):
        await client.create_job_template(
            request,
            parent="parent_value",
            job_template=resources.JobTemplate(name="name_value"),
            job_template_id="job_template_id_value",
        )
+
+
def test_list_job_templates(
    transport: str = "grpc", request_type=services.ListJobTemplatesRequest
):
    """list_job_templates forwards the request and wraps the reply in a pager."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        stub.return_value = services.ListJobTemplatesResponse(
            next_page_token="next_page_token_value", unreachable=["unreachable_value"],
        )
        response = client.list_job_templates(request)

    # Exactly one stub invocation, carrying the default request proto.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.ListJobTemplatesRequest()

    # The reply is wrapped in a pager that exposes the response fields.
    assert isinstance(response, pagers.ListJobTemplatesPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]
+
+
def test_list_job_templates_from_dict():
    """Re-run the request-object test with a dict request to cover coercion."""
    test_list_job_templates(request_type=dict)
+
+
def test_list_job_templates_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Patch the transport-level callable, then invoke with no arguments.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        client.list_job_templates()
        stub.assert_called()
        _name, args, _kwargs = stub.mock_calls[0]
        # An omitted request materializes as the default request proto.
        assert args[0] == services.ListJobTemplatesRequest()
+
+
@pytest.mark.asyncio
async def test_list_job_templates_async(
    transport: str = "grpc_asyncio", request_type=services.ListJobTemplatesRequest
):
    """Async variant: the awaited call resolves to an async pager over the reply."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked.
    request = request_type()

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            services.ListJobTemplatesResponse(
                next_page_token="next_page_token_value",
                unreachable=["unreachable_value"],
            )
        )
        response = await client.list_job_templates(request)

    # The stub saw exactly the default request proto.
    assert stub.mock_calls
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.ListJobTemplatesRequest()

    # The reply is wrapped in an async pager exposing the response fields.
    assert isinstance(response, pagers.ListJobTemplatesAsyncPager)
    assert response.next_page_token == "next_page_token_value"
    assert response.unreachable == ["unreachable_value"]
+
+
@pytest.mark.asyncio
async def test_list_job_templates_async_from_dict():
    """Re-run the async request-object test with a dict request to cover coercion."""
    await test_list_job_templates_async(request_type=dict)
+
+
def test_list_job_templates_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.ListJobTemplatesRequest()
    request.parent = "parent/value"

    # Patch the transport-level callable and send the request through.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        stub.return_value = services.ListJobTemplatesResponse()
        client.list_job_templates(request)

    # Exactly one stub call, carrying the very request we built.
    assert stub.call_count == 1
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "parent=parent/value",)
    assert expected_header in kwargs["metadata"]
+
+
@pytest.mark.asyncio
async def test_list_job_templates_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.ListJobTemplatesRequest()
    request.parent = "parent/value"

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            services.ListJobTemplatesResponse()
        )
        await client.list_job_templates(request)

    # The stub was invoked with the very request we built.
    assert stub.mock_calls
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "parent=parent/value",)
    assert expected_header in kwargs["metadata"]
+
+
def test_list_job_templates_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as stub:
        stub.return_value = services.ListJobTemplatesResponse()
        # Invoke with a truthy value for every flattened field.
        client.list_job_templates(parent="parent_value",)

    # The keyword landed on the corresponding request field.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0].parent == "parent_value"
+
+
def test_list_job_templates_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.ListJobTemplatesRequest()
    with pytest.raises(ValueError):
        client.list_job_templates(request, parent="parent_value",)
+
+
@pytest.mark.asyncio
async def test_list_job_templates_flattened_async():
    """Verify the async client folds flattened kwargs into the ListJobTemplates request."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as call:
        # Designate the awaitable return value for the call.  (The generated
        # plain `call.return_value = services.ListJobTemplatesResponse()` line
        # was dead code — immediately overwritten by this fake — and removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            services.ListJobTemplatesResponse()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.list_job_templates(parent="parent_value",)

    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].parent == "parent_value"
+
+
@pytest.mark.asyncio
async def test_list_job_templates_flattened_error_async():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.ListJobTemplatesRequest()
    with pytest.raises(ValueError):
        await client.list_job_templates(request, parent="parent_value",)
+
+
def test_list_job_templates_pager():
    """The sync pager flattens every JobTemplate across all pages."""
    # Fix: instantiate the credentials; the generated code passed the class
    # object itself, which only worked by accident because the transport
    # call is mocked below.
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            services.ListJobTemplatesResponse(
                job_templates=[
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                ],
                next_page_token="abc",
            ),
            services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(),], next_page_token="ghi",
            ),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(), resources.JobTemplate(),],
            ),
            RuntimeError,
        )

        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)),
        )
        pager = client.list_job_templates(request={})

        assert pager._metadata == metadata

        # list() is the idiomatic way to drain the pager.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, resources.JobTemplate) for i in results)
+
+
def test_list_job_templates_pages():
    """Each page yielded by the sync pager exposes its raw next_page_token."""
    # Fix: instantiate the credentials; the generated code passed the class
    # object itself rather than an instance.
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_job_templates), "__call__"
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            services.ListJobTemplatesResponse(
                job_templates=[
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                ],
                next_page_token="abc",
            ),
            services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(),], next_page_token="ghi",
            ),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(), resources.JobTemplate(),],
            ),
            RuntimeError,
        )
        pages = list(client.list_job_templates(request={}).pages)
        # The last page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
@pytest.mark.asyncio
async def test_list_job_templates_async_pager():
    """The async pager yields every JobTemplate across all pages."""
    # Fix: instantiate the credentials; the generated code passed the class
    # object itself rather than an instance.
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_job_templates),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            services.ListJobTemplatesResponse(
                job_templates=[
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                ],
                next_page_token="abc",
            ),
            services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(),], next_page_token="ghi",
            ),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(), resources.JobTemplate(),],
            ),
            RuntimeError,
        )
        async_pager = await client.list_job_templates(request={},)
        assert async_pager.next_page_token == "abc"
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.JobTemplate) for i in responses)
+
+
@pytest.mark.asyncio
async def test_list_job_templates_async_pages():
    """Each page yielded by the async pager exposes its raw next_page_token."""
    # Fix: instantiate the credentials; the generated code passed the class
    # object itself rather than an instance.
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
        type(client.transport.list_job_templates),
        "__call__",
        new_callable=mock.AsyncMock,
    ) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            services.ListJobTemplatesResponse(
                job_templates=[
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                    resources.JobTemplate(),
                ],
                next_page_token="abc",
            ),
            services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(),], next_page_token="ghi",
            ),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate(), resources.JobTemplate(),],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_job_templates(request={})).pages:
            pages.append(page_)
        # The last page has no next_page_token, hence the trailing "".
        for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
            assert page_.raw_page.next_page_token == token
+
+
def test_get_job_template(
    transport: str = "grpc", request_type=services.GetJobTemplateRequest
):
    """get_job_template forwards the request and surfaces the stub's reply."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        stub.return_value = resources.JobTemplate(name="name_value",)
        response = client.get_job_template(request)

    # Exactly one stub invocation, carrying the default request proto.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.GetJobTemplateRequest()

    # The canned reply comes back typed and populated as expected.
    assert isinstance(response, resources.JobTemplate)
    assert response.name == "name_value"
+
+
def test_get_job_template_from_dict():
    """Re-run the request-object test with a dict request to cover coercion."""
    test_get_job_template(request_type=dict)
+
+
def test_get_job_template_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Patch the transport-level callable, then invoke with no arguments.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        client.get_job_template()
        stub.assert_called()
        _name, args, _kwargs = stub.mock_calls[0]
        # An omitted request materializes as the default request proto.
        assert args[0] == services.GetJobTemplateRequest()
+
+
@pytest.mark.asyncio
async def test_get_job_template_async(
    transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest
):
    """Async variant: the awaited call resolves to the stub's canned reply."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked.
    request = request_type()

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate(name="name_value",)
        )
        response = await client.get_job_template(request)

    # The stub saw exactly the default request proto.
    assert stub.mock_calls
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.GetJobTemplateRequest()

    # The canned reply comes back typed and populated as expected.
    assert isinstance(response, resources.JobTemplate)
    assert response.name == "name_value"
+
+
@pytest.mark.asyncio
async def test_get_job_template_async_from_dict():
    """Re-run the async request-object test with a dict request to cover coercion."""
    await test_get_job_template_async(request_type=dict)
+
+
def test_get_job_template_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.GetJobTemplateRequest()
    request.name = "name/value"

    # Patch the transport-level callable and send the request through.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        stub.return_value = resources.JobTemplate()
        client.get_job_template(request)

    # Exactly one stub call, carrying the very request we built.
    assert stub.call_count == 1
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "name=name/value",)
    assert expected_header in kwargs["metadata"]
+
+
@pytest.mark.asyncio
async def test_get_job_template_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.GetJobTemplateRequest()
    request.name = "name/value"

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        await client.get_job_template(request)

    # The stub was invoked with the very request we built.
    assert stub.mock_calls
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "name=name/value",)
    assert expected_header in kwargs["metadata"]
+
+
def test_get_job_template_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Patch the transport-level callable and install a canned reply.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as stub:
        stub.return_value = resources.JobTemplate()
        # Invoke with a truthy value for every flattened field.
        client.get_job_template(name="name_value",)

    # The keyword landed on the corresponding request field.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0].name == "name_value"
+
+
def test_get_job_template_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.GetJobTemplateRequest()
    with pytest.raises(ValueError):
        client.get_job_template(request, name="name_value",)
+
+
@pytest.mark.asyncio
async def test_get_job_template_flattened_async():
    """Verify the async client folds flattened kwargs into the GetJobTemplate request."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
        # Designate the awaitable return value for the call.  (The generated
        # plain `call.return_value = resources.JobTemplate()` line was dead
        # code — immediately overwritten by this fake — so it was removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            resources.JobTemplate()
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.get_job_template(name="name_value",)

    # Establish that the underlying call was made with the expected
    # request object values.
    assert len(call.mock_calls)
    _, args, _ = call.mock_calls[0]
    assert args[0].name == "name_value"
+
+
@pytest.mark.asyncio
async def test_get_job_template_flattened_error_async():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    request = services.GetJobTemplateRequest()
    with pytest.raises(ValueError):
        await client.get_job_template(request, name="name_value",)
+
+
def test_delete_job_template(
    transport: str = "grpc", request_type=services.DeleteJobTemplateRequest
):
    """delete_job_template forwards the request and returns None on success."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked,
    # so an empty request is sufficient.
    request = request_type()

    # Patch the transport-level callable; delete returns google.protobuf.Empty,
    # which the client surfaces as None.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        stub.return_value = None
        response = client.delete_job_template(request)

    # Exactly one stub invocation, carrying the default request proto.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.DeleteJobTemplateRequest()

    # Deletion has no payload.
    assert response is None
+
+
def test_delete_job_template_from_dict():
    """Re-run the request-object test with a dict request to cover coercion."""
    test_delete_job_template(request_type=dict)
+
+
def test_delete_job_template_empty_call():
    """Coverage failsafe: a call with no request and no flattened fields works."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )

    # Patch the transport-level callable, then invoke with no arguments.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        client.delete_job_template()
        stub.assert_called()
        _name, args, _kwargs = stub.mock_calls[0]
        # An omitted request materializes as the default request proto.
        assert args[0] == services.DeleteJobTemplateRequest()
+
+
@pytest.mark.asyncio
async def test_delete_job_template_async(
    transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest
):
    """Async variant: the awaited delete resolves to None."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )

    # Proto3 fields are all optional at runtime and the API layer is mocked.
    request = request_type()

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        response = await client.delete_job_template(request)

    # The stub saw exactly the default request proto.
    assert stub.mock_calls
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0] == services.DeleteJobTemplateRequest()

    # Deletion has no payload.
    assert response is None
+
+
@pytest.mark.asyncio
async def test_delete_job_template_async_from_dict():
    """Re-run the async request-object test with a dict request to cover coercion."""
    await test_delete_job_template_async(request_type=dict)
+
+
def test_delete_job_template_field_headers():
    """URI-bound request fields must be echoed as x-goog-request-params metadata."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.DeleteJobTemplateRequest()
    request.name = "name/value"

    # Patch the transport-level callable and send the request through.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        stub.return_value = None
        client.delete_job_template(request)

    # Exactly one stub call, carrying the very request we built.
    assert stub.call_count == 1
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "name=name/value",)
    assert expected_header in kwargs["metadata"]
+
+
@pytest.mark.asyncio
async def test_delete_job_template_field_headers_async():
    """Async variant: routing fields must surface as x-goog-request-params."""
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Populate the routing field with a non-empty value.
    request = services.DeleteJobTemplateRequest()
    request.name = "name/value"

    # Patch the transport-level callable with an awaitable fake.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        stub.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
        await client.delete_job_template(request)

    # The stub was invoked with the very request we built.
    assert stub.mock_calls
    _name, args, kwargs = stub.mock_calls[0]
    assert args[0] == request

    # The routing header was attached to the call metadata.
    expected_header = ("x-goog-request-params", "name=name/value",)
    assert expected_header in kwargs["metadata"]
+
+
def test_delete_job_template_flattened():
    """Flattened keyword arguments are folded into the request proto."""
    client = TranscoderServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )

    # Patch the transport-level callable; delete has no payload.
    with mock.patch.object(
        type(client.transport.delete_job_template), "__call__"
    ) as stub:
        stub.return_value = None
        # Invoke with a truthy value for every flattened field.
        client.delete_job_template(name="name_value",)

    # The keyword landed on the corresponding request field.
    assert stub.call_count == 1
    _name, args, _kwargs = stub.mock_calls[0]
    assert args[0].name == "name_value"
+
+
+def test_delete_job_template_flattened_error():
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ client.delete_job_template(
+ services.DeleteJobTemplateRequest(), name="name_value",
+ )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_template_flattened_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Mock the actual call within the gRPC stub, and fake the request.
+ with mock.patch.object(
+ type(client.transport.delete_job_template), "__call__"
+ ) as call:
+ # Designate an appropriate return value for the call.
+ call.return_value = None
+
+ call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+ # Call the method with a truthy value for each flattened field,
+ # using the keyword arguments to the method.
+ response = await client.delete_job_template(name="name_value",)
+
+ # Establish that the underlying call was made with the expected
+ # request object values.
+ assert len(call.mock_calls)
+ _, args, _ = call.mock_calls[0]
+ assert args[0].name == "name_value"
+
+
+@pytest.mark.asyncio
+async def test_delete_job_template_flattened_error_async():
+ client = TranscoderServiceAsyncClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+
+ # Attempting to call a method with both a request object and flattened
+ # fields is an error.
+ with pytest.raises(ValueError):
+ await client.delete_job_template(
+ services.DeleteJobTemplateRequest(), name="name_value",
+ )
+
+
+def test_credentials_transport_error():
+ # It is an error to provide credentials and a transport instance.
+ transport = transports.TranscoderServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(), transport=transport,
+ )
+
+ # It is an error to provide a credentials file and a transport instance.
+ transport = transports.TranscoderServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = TranscoderServiceClient(
+ client_options={"credentials_file": "credentials.json"},
+ transport=transport,
+ )
+
+ # It is an error to provide scopes and a transport instance.
+ transport = transports.TranscoderServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ with pytest.raises(ValueError):
+ client = TranscoderServiceClient(
+ client_options={"scopes": ["1", "2"]}, transport=transport,
+ )
+
+
+def test_transport_instance():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.TranscoderServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ client = TranscoderServiceClient(transport=transport)
+ assert client.transport is transport
+
+
+def test_transport_get_channel():
+ # A client may be instantiated with a custom transport instance.
+ transport = transports.TranscoderServiceGrpcTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+ transport = transports.TranscoderServiceGrpcAsyncIOTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ )
+ channel = transport.grpc_channel
+ assert channel
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transport_adc(transport_class):
+    """Transports fall back to application-default credentials when none are given."""
+    # Test default credentials are used if not provided.
+    with mock.patch.object(google.auth, "default") as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class()
+        adc.assert_called_once()
+
+
+def test_transport_grpc_default():
+ # A client should use the gRPC transport by default.
+ client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),)
+ assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport,)
+
+
+def test_transcoder_service_base_transport_error():
+ # Passing both a credentials object and credentials_file should raise an error
+ with pytest.raises(core_exceptions.DuplicateCredentialArgs):
+ transport = transports.TranscoderServiceTransport(
+ credentials=ga_credentials.AnonymousCredentials(),
+ credentials_file="credentials.json",
+ )
+
+
+def test_transcoder_service_base_transport():
+    """Every RPC method on the abstract base transport raises NotImplementedError."""
+    # Instantiate the base transport.
+    with mock.patch(
+        "google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport.__init__"
+    ) as Transport:
+        Transport.return_value = None
+        transport = transports.TranscoderServiceTransport(
+            credentials=ga_credentials.AnonymousCredentials(),
+        )
+
+    # Every method on the transport should just blindly
+    # raise NotImplementedError.
+    methods = (
+        "create_job",
+        "list_jobs",
+        "get_job",
+        "delete_job",
+        "create_job_template",
+        "list_job_templates",
+        "get_job_template",
+        "delete_job_template",
+    )
+    for method in methods:
+        with pytest.raises(NotImplementedError):
+            getattr(transport, method)(request=object())
+
+
+@requires_google_auth_gte_1_25_0
+def test_transcoder_service_base_transport_with_credentials_file():
+    """credentials_file is loaded with default_scopes on google-auth >= 1.25."""
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.TranscoderServiceTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        # Newer google-auth accepts a separate default_scopes argument.
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_transcoder_service_base_transport_with_credentials_file_old_google_auth():
+    """credentials_file is loaded with plain scopes on google-auth < 1.25."""
+    # Instantiate the base transport with a credentials file
+    with mock.patch.object(
+        google.auth, "load_credentials_from_file", autospec=True
+    ) as load_creds, mock.patch(
+        "google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.TranscoderServiceTransport(
+            credentials_file="credentials.json", quota_project_id="octopus",
+        )
+        # Older google-auth has no default_scopes parameter.
+        load_creds.assert_called_once_with(
+            "credentials.json",
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+def test_transcoder_service_base_transport_with_adc():
+    """Base transport consults ADC when neither credentials nor a file are given."""
+    # Test the default credentials are used if credentials and credentials_file are None.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
+        "google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages"
+    ) as Transport:
+        Transport.return_value = None
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport = transports.TranscoderServiceTransport()
+        adc.assert_called_once()
+
+
+@requires_google_auth_gte_1_25_0
+def test_transcoder_service_auth_adc():
+    """Client construction uses ADC with default_scopes on google-auth >= 1.25."""
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        TranscoderServiceClient()
+        adc.assert_called_once_with(
+            scopes=None,
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@requires_google_auth_lt_1_25_0
+def test_transcoder_service_auth_adc_old_google_auth():
+    """Client construction uses ADC with plain scopes on google-auth < 1.25."""
+    # If no credentials are provided, we should use ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        TranscoderServiceClient()
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id=None,
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_gte_1_25_0
+def test_transcoder_service_transport_auth_adc(transport_class):
+    """Transports forward scopes/quota_project_id to ADC on google-auth >= 1.25."""
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+        adc.assert_called_once_with(
+            scopes=["1", "2"],
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+@requires_google_auth_lt_1_25_0
+def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class):
+    """Transports call ADC with the cloud-platform scope on google-auth < 1.25."""
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(google.auth, "default", autospec=True) as adc:
+        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
+        transport_class(quota_project_id="octopus")
+        adc.assert_called_once_with(
+            scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            quota_project_id="octopus",
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class,grpc_helpers",
+    [
+        (transports.TranscoderServiceGrpcTransport, grpc_helpers),
+        (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async),
+    ],
+)
+def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers):
+    """Transports pass the expected host, scopes and options to create_channel."""
+    # If credentials and host are not provided, the transport class should use
+    # ADC credentials.
+    with mock.patch.object(
+        google.auth, "default", autospec=True
+    ) as adc, mock.patch.object(
+        grpc_helpers, "create_channel", autospec=True
+    ) as create_channel:
+        creds = ga_credentials.AnonymousCredentials()
+        adc.return_value = (creds, None)
+        transport_class(quota_project_id="octopus", scopes=["1", "2"])
+
+        # The channel must target the service default endpoint with
+        # unlimited message sizes.
+        create_channel.assert_called_with(
+            "transcoder.googleapis.com:443",
+            credentials=creds,
+            credentials_file=None,
+            quota_project_id="octopus",
+            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
+            scopes=["1", "2"],
+            default_host="transcoder.googleapis.com",
+            ssl_credentials=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transcoder_service_grpc_transport_client_cert_source_for_mtls(transport_class):
+    """mTLS channel credentials come from ssl_channel_credentials when given,
+    otherwise from the client_cert_source_for_mtls callback."""
+    cred = ga_credentials.AnonymousCredentials()
+
+    # Check ssl_channel_credentials is used if provided.
+    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
+        mock_ssl_channel_creds = mock.Mock()
+        transport_class(
+            host="squid.clam.whelk",
+            credentials=cred,
+            ssl_channel_credentials=mock_ssl_channel_creds,
+        )
+        mock_create_channel.assert_called_once_with(
+            "squid.clam.whelk:443",
+            credentials=cred,
+            credentials_file=None,
+            scopes=None,
+            ssl_credentials=mock_ssl_channel_creds,
+            quota_project_id=None,
+            options=[
+                ("grpc.max_send_message_length", -1),
+                ("grpc.max_receive_message_length", -1),
+            ],
+        )
+
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback,
+            )
+            # The callback's cert/key pair must feed grpc.ssl_channel_credentials.
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert, private_key=expected_key
+            )
+
+
+def test_transcoder_service_host_no_port():
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="transcoder.googleapis.com"
+ ),
+ )
+ assert client.transport._host == "transcoder.googleapis.com:443"
+
+
+def test_transcoder_service_host_with_port():
+ client = TranscoderServiceClient(
+ credentials=ga_credentials.AnonymousCredentials(),
+ client_options=client_options.ClientOptions(
+ api_endpoint="transcoder.googleapis.com:8000"
+ ),
+ )
+ assert client.transport._host == "transcoder.googleapis.com:8000"
+
+
+def test_transcoder_service_grpc_transport_channel():
+ channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.TranscoderServiceGrpcTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+def test_transcoder_service_grpc_asyncio_transport_channel():
+ channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
+
+ # Check that channel is used if provided.
+ transport = transports.TranscoderServiceGrpcAsyncIOTransport(
+ host="squid.clam.whelk", channel=channel,
+ )
+ assert transport.grpc_channel == channel
+ assert transport._host == "squid.clam.whelk:443"
+ assert transport._ssl_channel_credentials == None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transcoder_service_transport_channel_mtls_with_client_cert_source(
+    transport_class,
+):
+    """Deprecated api_mtls_endpoint/client_cert_source still build an mTLS channel."""
+    with mock.patch(
+        "grpc.ssl_channel_credentials", autospec=True
+    ) as grpc_ssl_channel_cred:
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_ssl_cred = mock.Mock()
+            grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+
+            cred = ga_credentials.AnonymousCredentials()
+            # Using the deprecated arguments must emit a DeprecationWarning.
+            with pytest.warns(DeprecationWarning):
+                with mock.patch.object(google.auth, "default") as adc:
+                    adc.return_value = (cred, None)
+                    transport = transport_class(
+                        host="squid.clam.whelk",
+                        api_mtls_endpoint="mtls.squid.clam.whelk",
+                        client_cert_source=client_cert_source_callback,
+                    )
+                    adc.assert_called_once()
+
+            # The callback's cert/key pair is fed into grpc.ssl_channel_credentials.
+            grpc_ssl_channel_cred.assert_called_once_with(
+                certificate_chain=b"cert bytes", private_key=b"key bytes"
+            )
+            # The channel targets the mTLS endpoint with those credentials.
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+            assert transport._ssl_channel_credentials == mock_ssl_cred
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize(
+    "transport_class",
+    [
+        transports.TranscoderServiceGrpcTransport,
+        transports.TranscoderServiceGrpcAsyncIOTransport,
+    ],
+)
+def test_transcoder_service_transport_channel_mtls_with_adc(transport_class):
+    """Without a client_cert_source, mTLS SSL credentials come from ADC."""
+    mock_ssl_cred = mock.Mock()
+    # Stub google.auth.transport.grpc.SslCredentials so its ssl_credentials
+    # property yields our mock without touching real ADC mTLS config.
+    with mock.patch.multiple(
+        "google.auth.transport.grpc.SslCredentials",
+        __init__=mock.Mock(return_value=None),
+        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
+    ):
+        with mock.patch.object(
+            transport_class, "create_channel"
+        ) as grpc_create_channel:
+            mock_grpc_channel = mock.Mock()
+            grpc_create_channel.return_value = mock_grpc_channel
+            mock_cred = mock.Mock()
+
+            # The deprecated api_mtls_endpoint argument must still warn.
+            with pytest.warns(DeprecationWarning):
+                transport = transport_class(
+                    host="squid.clam.whelk",
+                    credentials=mock_cred,
+                    api_mtls_endpoint="mtls.squid.clam.whelk",
+                    client_cert_source=None,
+                )
+
+            grpc_create_channel.assert_called_once_with(
+                "mtls.squid.clam.whelk:443",
+                credentials=mock_cred,
+                credentials_file=None,
+                scopes=None,
+                ssl_credentials=mock_ssl_cred,
+                quota_project_id=None,
+                options=[
+                    ("grpc.max_send_message_length", -1),
+                    ("grpc.max_receive_message_length", -1),
+                ],
+            )
+            assert transport.grpc_channel == mock_grpc_channel
+
+
+def test_job_path():
+ project = "squid"
+ location = "clam"
+ job = "whelk"
+ expected = "projects/{project}/locations/{location}/jobs/{job}".format(
+ project=project, location=location, job=job,
+ )
+ actual = TranscoderServiceClient.job_path(project, location, job)
+ assert expected == actual
+
+
+def test_parse_job_path():
+ expected = {
+ "project": "octopus",
+ "location": "oyster",
+ "job": "nudibranch",
+ }
+ path = TranscoderServiceClient.job_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_job_path(path)
+ assert expected == actual
+
+
+def test_job_template_path():
+ project = "cuttlefish"
+ location = "mussel"
+ job_template = "winkle"
+ expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(
+ project=project, location=location, job_template=job_template,
+ )
+ actual = TranscoderServiceClient.job_template_path(project, location, job_template)
+ assert expected == actual
+
+
+def test_parse_job_template_path():
+ expected = {
+ "project": "nautilus",
+ "location": "scallop",
+ "job_template": "abalone",
+ }
+ path = TranscoderServiceClient.job_template_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_job_template_path(path)
+ assert expected == actual
+
+
+def test_common_billing_account_path():
+ billing_account = "squid"
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = TranscoderServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = TranscoderServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "whelk"
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = TranscoderServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = TranscoderServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = TranscoderServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = TranscoderServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+ expected = "projects/{project}".format(project=project,)
+ actual = TranscoderServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = TranscoderServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = TranscoderServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = TranscoderServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
+def test_client_withDEFAULT_CLIENT_INFO():
+    """A custom ClientInfo is forwarded to _prep_wrapped_messages for both
+    client-level and transport-level construction."""
+    client_info = gapic_v1.client_info.ClientInfo()
+
+    with mock.patch.object(
+        transports.TranscoderServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        client = TranscoderServiceClient(
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
+
+    with mock.patch.object(
+        transports.TranscoderServiceTransport, "_prep_wrapped_messages"
+    ) as prep:
+        transport_class = TranscoderServiceClient.get_transport_class()
+        transport = transport_class(
+            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
+        )
+        prep.assert_called_once_with(client_info)
From 86cd3f1c61a649d4c2810a730d7e141e1600fe68 Mon Sep 17 00:00:00 2001
From: "release-please[bot]"
<55107282+release-please[bot]@users.noreply.github.com>
Date: Mon, 12 Jul 2021 23:26:33 +0000
Subject: [PATCH 9/9] chore: release 0.4.0 (#63)
:robot: I have created a release \*beep\* \*boop\*
---
## [0.4.0](https://www.github.com/googleapis/python-video-transcoder/compare/v0.3.1...v0.4.0) (2021-07-09)
### Features
* add always_use_jwt_access ([#62](https://www.github.com/googleapis/python-video-transcoder/issues/62)) ([d43c40e](https://www.github.com/googleapis/python-video-transcoder/commit/d43c40e9ab80c42afd25efa1c2980d23dbc50ce2))
* Add Transcoder V1 ([#67](https://www.github.com/googleapis/python-video-transcoder/issues/67)) ([721d28e](https://www.github.com/googleapis/python-video-transcoder/commit/721d28ec565bfdb41a195167a989baf042ede228))
### Bug Fixes
* disable always_use_jwt_access ([#66](https://www.github.com/googleapis/python-video-transcoder/issues/66)) ([98d8b86](https://www.github.com/googleapis/python-video-transcoder/commit/98d8b860227a9b9a8b4cecc851ec547d7789ac66))
### Documentation
* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-video-transcoder/issues/1127)) ([#58](https://www.github.com/googleapis/python-video-transcoder/issues/58)) ([1659ce8](https://www.github.com/googleapis/python-video-transcoder/commit/1659ce88ef94139a271be9719a4adaf4e3a600c0)), closes [#1126](https://www.github.com/googleapis/python-video-transcoder/issues/1126)
---
This PR was generated with [Release Please](https://github.com/googleapis/release-please). See [documentation](https://github.com/googleapis/release-please#release-please).
---
CHANGELOG.md | 18 ++++++++++++++++++
setup.py | 2 +-
2 files changed, 19 insertions(+), 1 deletion(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e916b90..6e22c73 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,23 @@
# Changelog
+## [0.4.0](https://www.github.com/googleapis/python-video-transcoder/compare/v0.3.1...v0.4.0) (2021-07-09)
+
+
+### Features
+
+* add always_use_jwt_access ([#62](https://www.github.com/googleapis/python-video-transcoder/issues/62)) ([d43c40e](https://www.github.com/googleapis/python-video-transcoder/commit/d43c40e9ab80c42afd25efa1c2980d23dbc50ce2))
+* Add Transcoder V1 ([#67](https://www.github.com/googleapis/python-video-transcoder/issues/67)) ([721d28e](https://www.github.com/googleapis/python-video-transcoder/commit/721d28ec565bfdb41a195167a989baf042ede228))
+
+
+### Bug Fixes
+
+* disable always_use_jwt_access ([#66](https://www.github.com/googleapis/python-video-transcoder/issues/66)) ([98d8b86](https://www.github.com/googleapis/python-video-transcoder/commit/98d8b860227a9b9a8b4cecc851ec547d7789ac66))
+
+
+### Documentation
+
+* omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-video-transcoder/issues/1127)) ([#58](https://www.github.com/googleapis/python-video-transcoder/issues/58)) ([1659ce8](https://www.github.com/googleapis/python-video-transcoder/commit/1659ce88ef94139a271be9719a4adaf4e3a600c0)), closes [#1126](https://www.github.com/googleapis/python-video-transcoder/issues/1126)
+
### [0.3.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.3.0...v0.3.1) (2021-05-28)
diff --git a/setup.py b/setup.py
index 83242bc..f9a5501 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "0.3.1"
+version = "0.4.0"
package_root = os.path.abspath(os.path.dirname(__file__))