From 90bfb962a7a01748312c0b60de034fb2697a8c3e Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Tue, 9 Nov 2021 15:31:25 -0500 Subject: [PATCH 01/20] chore: use gapic-generator-python 0.56.2 (#115) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore: update Java and Python dependencies PiperOrigin-RevId: 408420890 Source-Link: https://github.com/googleapis/googleapis/commit/2921f9fb3bfbd16f6b2da0104373e2b47a80a65e Source-Link: https://github.com/googleapis/googleapis-gen/commit/6598ca8cbbf5226733a099c4506518a5af6ff74c Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNjU5OGNhOGNiYmY1MjI2NzMzYTA5OWM0NTA2NTE4YTVhZjZmZjc0YyJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../transcoder_service/async_client.py | 13 +-- .../services/transcoder_service/client.py | 25 ++++-- .../transcoder_service/transports/base.py | 8 +- .../transcoder_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../video/transcoder_v1/types/resources.py | 18 ++++ .../transcoder_service/async_client.py | 13 +-- .../services/transcoder_service/client.py | 25 ++++-- .../transcoder_service/transports/base.py | 8 +- .../transcoder_service/transports/grpc.py | 4 +- .../transports/grpc_asyncio.py | 4 +- .../transcoder_v1beta1/types/resources.py | 14 +++ .../transcoder_v1/test_transcoder_service.py | 88 ++++++++++++++----- .../test_transcoder_service.py | 88 ++++++++++++++----- 14 files changed, 230 insertions(+), 86 deletions(-) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index b7daea9..b7567db 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ 
b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.video.transcoder_v1.services.transcoder_service import pagers from google.cloud.video.transcoder_v1.types import resources diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index be4298d..1e10da5 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.video.transcoder_v1.services.transcoder_service import pagers from google.cloud.video.transcoder_v1.types import resources @@ -311,8 +313,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py index 2e7bc75..9c9f3cf 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py index 457c5af..56d7343 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore 
+from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py index 0a07613..c0c87d1 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/video/transcoder_v1/types/resources.py b/google/cloud/video/transcoder_v1/types/resources.py index 23248b0..cb02c1a 100644 --- a/google/cloud/video/transcoder_v1/types/resources.py +++ b/google/cloud/video/transcoder_v1/types/resources.py @@ -85,6 +85,7 @@ class Job(proto.Message): This field is a member of `oneof`_ ``job_config``. config (google.cloud.video.transcoder_v1.types.JobConfig): The configuration for this job. + This field is a member of `oneof`_ ``job_config``. state (google.cloud.video.transcoder_v1.types.Job.ProcessingState): Output only. The current state of the job. @@ -291,13 +292,16 @@ class ElementaryStream(proto.Message): A unique key for this elementary stream. video_stream (google.cloud.video.transcoder_v1.types.VideoStream): Encoding of a video stream. + This field is a member of `oneof`_ ``elementary_stream``. 
audio_stream (google.cloud.video.transcoder_v1.types.AudioStream): Encoding of an audio stream. + This field is a member of `oneof`_ ``elementary_stream``. text_stream (google.cloud.video.transcoder_v1.types.TextStream): Encoding of a text stream. For example, closed captions or subtitles. + This field is a member of `oneof`_ ``elementary_stream``. """ @@ -455,10 +459,12 @@ class SpriteSheet(proto.Message): number of sprites distributed evenly across the timeline of the output media. The default is 100. + This field is a member of `oneof`_ ``extraction_strategy``. interval (google.protobuf.duration_pb2.Duration): Starting from ``0s``, create sprites at regular intervals. Specify the interval value in seconds. + This field is a member of `oneof`_ ``extraction_strategy``. quality (int): The quality of the generated sprite sheet. @@ -631,12 +637,15 @@ class Animation(proto.Message): Attributes: animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic): Display static overlay object. + This field is a member of `oneof`_ ``animation_type``. animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade): Display overlay object with fade animation. + This field is a member of `oneof`_ ``animation_type``. animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd): End previous animation. + This field is a member of `oneof`_ ``animation_type``. """ @@ -843,12 +852,15 @@ class VideoStream(proto.Message): Attributes: h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings): H264 codec settings. + This field is a member of `oneof`_ ``codec_settings``. h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings): H265 codec settings. + This field is a member of `oneof`_ ``codec_settings``. vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings): VP9 codec settings. + This field is a member of `oneof`_ ``codec_settings``. 
""" @@ -919,6 +931,7 @@ class H264CodecSettings(proto.Message): gop_frame_count (int): Select the GOP size based on the specified frame count. Must be greater than zero. + This field is a member of `oneof`_ ``gop_mode``. gop_duration (google.protobuf.duration_pb2.Duration): Select the GOP size based on the specified duration. The @@ -926,6 +939,7 @@ class H264CodecSettings(proto.Message): than or equal to ```segmentDuration`` <#SegmentSettings>`__, and ```segmentDuration`` <#SegmentSettings>`__ must be divisible by ``gopDuration``. + This field is a member of `oneof`_ ``gop_mode``. enable_two_pass (bool): Use two-pass encoding strategy to achieve better video @@ -1077,6 +1091,7 @@ class H265CodecSettings(proto.Message): gop_frame_count (int): Select the GOP size based on the specified frame count. Must be greater than zero. + This field is a member of `oneof`_ ``gop_mode``. gop_duration (google.protobuf.duration_pb2.Duration): Select the GOP size based on the specified duration. The @@ -1084,6 +1099,7 @@ class H265CodecSettings(proto.Message): than or equal to ```segmentDuration`` <#SegmentSettings>`__, and ```segmentDuration`` <#SegmentSettings>`__ must be divisible by ``gopDuration``. + This field is a member of `oneof`_ ``gop_mode``. enable_two_pass (bool): Use two-pass encoding strategy to achieve better video @@ -1244,6 +1260,7 @@ class Vp9CodecSettings(proto.Message): gop_frame_count (int): Select the GOP size based on the specified frame count. Must be greater than zero. + This field is a member of `oneof`_ ``gop_mode``. gop_duration (google.protobuf.duration_pb2.Duration): Select the GOP size based on the specified duration. The @@ -1251,6 +1268,7 @@ class Vp9CodecSettings(proto.Message): than or equal to ```segmentDuration`` <#SegmentSettings>`__, and ```segmentDuration`` <#SegmentSettings>`__ must be divisible by ``gopDuration``. + This field is a member of `oneof`_ ``gop_mode``. profile (str): Enforces the specified codec profile. 
The following profiles diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py index 8aac7a0..6f41c3f 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py @@ -19,14 +19,17 @@ from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core.client_options import ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers from google.cloud.video.transcoder_v1beta1.types import resources diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py index f86e1c3..8df1341 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py @@ -14,23 +14,25 @@ # limitations under the License. 
# from collections import OrderedDict -from distutils import util import os import re from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore -OptionalRetry = Union[retries.Retry, object] +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers from google.cloud.video.transcoder_v1beta1.types import resources @@ -310,8 +312,15 @@ def __init__( client_options = client_options_lib.ClientOptions() # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool( - util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")) + if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( + "true", + "false", + ): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + use_client_cert = ( + os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" ) client_cert_source_func = None diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py index 65661e8..7a9c092 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py @@ -18,10 +18,10 @@ import pkg_resources import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py index 4f4f46f..72b5df3 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py @@ -16,8 +16,8 @@ import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import grpc_helpers # type: ignore -from 
google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py index 5d71361..0e88ee2 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py @@ -16,8 +16,8 @@ import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py index fae1432..7c7a7f8 100644 --- a/google/cloud/video/transcoder_v1beta1/types/resources.py +++ b/google/cloud/video/transcoder_v1beta1/types/resources.py @@ -87,6 +87,7 @@ class Job(proto.Message): This field is a member of `oneof`_ ``job_config``. config (google.cloud.video.transcoder_v1beta1.types.JobConfig): The configuration for this job. + This field is a member of `oneof`_ ``job_config``. priority (int): Specify the priority of the job. Enter a @@ -340,13 +341,16 @@ class ElementaryStream(proto.Message): A unique key for this elementary stream. video_stream (google.cloud.video.transcoder_v1beta1.types.VideoStream): Encoding of a video stream. 
+ This field is a member of `oneof`_ ``elementary_stream``. audio_stream (google.cloud.video.transcoder_v1beta1.types.AudioStream): Encoding of an audio stream. + This field is a member of `oneof`_ ``elementary_stream``. text_stream (google.cloud.video.transcoder_v1beta1.types.TextStream): Encoding of a text stream. For example, closed captions or subtitles. + This field is a member of `oneof`_ ``elementary_stream``. """ @@ -508,10 +512,12 @@ class SpriteSheet(proto.Message): number of sprites distributed evenly across the timeline of the output media. The default is 100. + This field is a member of `oneof`_ ``extraction_strategy``. interval (google.protobuf.duration_pb2.Duration): Starting from ``0s``, create sprites at regular intervals. Specify the interval value in seconds. + This field is a member of `oneof`_ ``extraction_strategy``. quality (int): The quality of the generated sprite sheet. @@ -684,12 +690,15 @@ class Animation(proto.Message): Attributes: animation_static (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationStatic): Display static overlay object. + This field is a member of `oneof`_ ``animation_type``. animation_fade (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationFade): Display overlay object with fade animation. + This field is a member of `oneof`_ ``animation_type``. animation_end (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationEnd): End previous animation. + This field is a member of `oneof`_ ``animation_type``. """ @@ -983,6 +992,7 @@ class VideoStream(proto.Message): gop_frame_count (int): Select the GOP size based on the specified frame count. Must be greater than zero. + This field is a member of `oneof`_ ``gop_mode``. gop_duration (google.protobuf.duration_pb2.Duration): Select the GOP size based on the specified duration. 
The @@ -990,6 +1000,7 @@ class VideoStream(proto.Message): than or equal to ```segmentDuration`` <#SegmentSettings>`__, and ```segmentDuration`` <#SegmentSettings>`__ must be divisible by ``gopDuration``. + This field is a member of `oneof`_ ``gop_mode``. entropy_coder (str): The entropy coder to use. The default is ``"cabac"``. @@ -1255,13 +1266,16 @@ class Encryption(proto.Message): represented as lowercase hexadecimal digits. aes_128 (google.cloud.video.transcoder_v1beta1.types.Encryption.Aes128Encryption): Configuration for AES-128 encryption. + This field is a member of `oneof`_ ``encryption_mode``. sample_aes (google.cloud.video.transcoder_v1beta1.types.Encryption.SampleAesEncryption): Configuration for SAMPLE-AES encryption. + This field is a member of `oneof`_ ``encryption_mode``. mpeg_cenc (google.cloud.video.transcoder_v1beta1.types.Encryption.MpegCommonEncryption): Configuration for MPEG Common Encryption (MPEG-CENC). + This field is a member of `oneof`_ ``encryption_mode``. """ diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index 0a1c81f..ef49f51 100644 --- a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -665,8 +665,12 @@ def test_create_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job == resources.Job(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name="name_value") + assert arg == mock_val def test_create_job_flattened_error(): @@ -704,8 +708,12 @@ async def test_create_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job == resources.Job(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -878,7 +886,9 @@ def test_list_jobs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_jobs_flattened_error(): @@ -914,7 +924,9 @@ async def test_list_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1207,7 +1219,9 @@ def test_get_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_job_flattened_error(): @@ -1241,7 +1255,9 @@ async def test_get_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1399,7 +1415,9 @@ def test_delete_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_job_flattened_error(): @@ -1433,7 +1451,9 @@ async def test_delete_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1615,9 +1635,15 @@ def test_create_job_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_template == resources.JobTemplate(name="name_value") - assert args[0].job_template_id == "job_template_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name="name_value") + assert arg == mock_val + arg = args[0].job_template_id + mock_val = "job_template_id_value" + assert arg == mock_val def test_create_job_template_flattened_error(): @@ -1662,9 +1688,15 @@ async def test_create_job_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_template == resources.JobTemplate(name="name_value") - assert args[0].job_template_id == "job_template_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name="name_value") + assert arg == mock_val + arg = args[0].job_template_id + mock_val = "job_template_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1852,7 +1884,9 @@ def test_list_job_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_job_templates_flattened_error(): @@ -1890,7 +1924,9 @@ async def test_list_job_templates_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2206,7 +2242,9 @@ def test_get_job_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_job_template_flattened_error(): @@ -2242,7 +2280,9 @@ async def test_get_job_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2414,7 +2454,9 @@ def test_delete_job_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_job_template_flattened_error(): @@ -2450,7 +2492,9 @@ async def test_delete_job_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py index 13b9639..364c2ef 100644 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -671,8 +671,12 @@ def test_create_job_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job == resources.Job(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name="name_value") + assert arg == mock_val def test_create_job_flattened_error(): @@ -710,8 +714,12 @@ async def test_create_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job == resources.Job(name="name_value") + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name="name_value") + assert arg == mock_val @pytest.mark.asyncio @@ -879,7 +887,9 @@ def test_list_jobs_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_jobs_flattened_error(): @@ -915,7 +925,9 @@ async def test_list_jobs_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1216,7 +1228,9 @@ def test_get_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_job_flattened_error(): @@ -1250,7 +1264,9 @@ async def test_get_job_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1408,7 +1424,9 @@ def test_delete_job_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_job_flattened_error(): @@ -1442,7 +1460,9 @@ async def test_delete_job_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1624,9 +1644,15 @@ def test_create_job_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_template == resources.JobTemplate(name="name_value") - assert args[0].job_template_id == "job_template_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name="name_value") + assert arg == mock_val + arg = args[0].job_template_id + mock_val = "job_template_id_value" + assert arg == mock_val def test_create_job_template_flattened_error(): @@ -1671,9 +1697,15 @@ async def test_create_job_template_flattened_async(): # request object values. 
assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" - assert args[0].job_template == resources.JobTemplate(name="name_value") - assert args[0].job_template_id == "job_template_id_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name="name_value") + assert arg == mock_val + arg = args[0].job_template_id + mock_val = "job_template_id_value" + assert arg == mock_val @pytest.mark.asyncio @@ -1856,7 +1888,9 @@ def test_list_job_templates_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val def test_list_job_templates_flattened_error(): @@ -1894,7 +1928,9 @@ async def test_list_job_templates_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].parent == "parent_value" + arg = args[0].parent + mock_val = "parent_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2210,7 +2246,9 @@ def test_get_job_template_flattened(): # request object values. assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_get_job_template_flattened_error(): @@ -2246,7 +2284,9 @@ async def test_get_job_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio @@ -2418,7 +2458,9 @@ def test_delete_job_template_flattened(): # request object values. 
assert len(call.mock_calls) == 1 _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val def test_delete_job_template_flattened_error(): @@ -2454,7 +2496,9 @@ async def test_delete_job_template_flattened_async(): # request object values. assert len(call.mock_calls) _, args, _ = call.mock_calls[0] - assert args[0].name == "name_value" + arg = args[0].name + mock_val = "name_value" + assert arg == mock_val @pytest.mark.asyncio From 19e4459a75d54596a020e9a72dcb83e084bf0195 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 11 Nov 2021 18:28:06 -0500 Subject: [PATCH 02/20] chore(python): add .github/CODEOWNERS as a templated file (#116) Source-Link: https://github.com/googleapis/synthtool/commit/c5026b3217973a8db55db8ee85feee0e9a65e295 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 9 +++++---- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 108063d..7519fa3 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4ee57a76a176ede9087c14330c625a71553cf9c72828b2c0ca12f5338171ba60 + digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index f0bc940..44cc868 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,9 +3,10 @@ # # For syntax help see: # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax +# Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. 
-# The @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python +# @googleapis/yoshi-python is the default owner for changes in this repo +* @googleapis/yoshi-python -# The python-samples-reviewers team is the default owner for samples changes -/samples/ @googleapis/python-samples-owners +# @googleapis/python-samples-owners is the default owner for samples changes +/samples/ @googleapis/python-samples-owners From 32eb752a0cc98afaeac0dad21e6a4b17bc215751 Mon Sep 17 00:00:00 2001 From: Dan Lee <71398022+dandhlee@users.noreply.github.com> Date: Tue, 16 Nov 2021 11:01:14 -0500 Subject: [PATCH 03/20] chore: update doc links from googleapis.dev to cloud.google.com (#117) --- .repo-metadata.json | 2 +- README.rst | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index ee08121..d634ab9 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -2,7 +2,7 @@ "name": "transcoder", "name_pretty": "Transcoder", "product_documentation": "https://cloud.google.com/transcoder", - "client_documentation": "https://googleapis.dev/python/transcoder/latest", + "client_documentation": "https://cloud.google.com/python/docs/reference/transcoder/latest", "issue_tracker": "", "release_level": "ga", "language": "python", diff --git a/README.rst b/README.rst index 0ecdc12..e613c07 100644 --- a/README.rst +++ b/README.rst @@ -15,7 +15,7 @@ Python Client for Transcoder API .. |versions| image:: https://img.shields.io/pypi/pyversions/google-cloud-video-transcoder.svg :target: https://pypi.org/project/google-cloud-video-transcoder/ .. _Transcoder API: https://cloud.google.com/transcoder -.. _Client Library Documentation: https://googleapis.dev/python/transcoder/latest +.. _Client Library Documentation: https://cloud.google.com/python/docs/reference/transcoder/latest .. 
_Product Documentation: https://cloud.google.com/transcoder Quick Start From 2033de33c7b0e0c21c22848d544164342553e7ff Mon Sep 17 00:00:00 2001 From: Anthonios Partheniou Date: Tue, 28 Dec 2021 13:10:34 -0500 Subject: [PATCH 04/20] chore: update .repo-metadata.json (#120) --- .repo-metadata.json | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/.repo-metadata.json b/.repo-metadata.json index d634ab9..3e20e91 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -4,12 +4,13 @@ "product_documentation": "https://cloud.google.com/transcoder", "client_documentation": "https://cloud.google.com/python/docs/reference/transcoder/latest", "issue_tracker": "", - "release_level": "ga", + "release_level": "stable", "language": "python", "library_type": "GAPIC_AUTO", "repo": "googleapis/python-video-transcoder", "distribution_name": "google-cloud-video-transcoder", "api_id": "transcoder.googleapis.com", "default_version": "v1", - "codeowner_team": "" + "codeowner_team": "", + "api_shortname": "transcoder" } From ac645abd2580078308e9da6574903bd76fb36f76 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 6 Jan 2022 16:06:23 +0000 Subject: [PATCH 05/20] chore: use python-samples-reviewers (#122) --- .github/.OwlBot.lock.yaml | 2 +- .github/CODEOWNERS | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 7519fa3..f33299d 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:0e18b9475fbeb12d9ad4302283171edebb6baf2dfca1bd215ee3b34ed79d95d7 + digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 44cc868..e446644 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -8,5 +8,5 @@ # @googleapis/yoshi-python is the 
default owner for changes in this repo * @googleapis/yoshi-python -# @googleapis/python-samples-owners is the default owner for samples changes -/samples/ @googleapis/python-samples-owners +# @googleapis/python-samples-reviewers is the default owner for samples changes +/samples/ @googleapis/python-samples-reviewers From adfc8eacdee067ce34f847014b816154106935b7 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 13 Jan 2022 15:50:23 +0000 Subject: [PATCH 06/20] build: switch to release-please for tagging (#123) --- .github/.OwlBot.lock.yaml | 2 +- .github/release-please.yml | 1 + .github/release-trigger.yml | 1 + 3 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 .github/release-trigger.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index f33299d..ff5126c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:899d5d7cc340fa8ef9d8ae1a8cfba362c6898584f779e156f25ee828ba824610 + digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 diff --git a/.github/release-please.yml b/.github/release-please.yml index 4507ad0..466597e 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1 +1,2 @@ releaseType: python +handleGHRelease: true diff --git a/.github/release-trigger.yml b/.github/release-trigger.yml new file mode 100644 index 0000000..d4ca941 --- /dev/null +++ b/.github/release-trigger.yml @@ -0,0 +1 @@ +enabled: true From 6937ac23195f62701b90307e8895871d08ced47c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Fri, 14 Jan 2022 11:08:20 -0500 Subject: [PATCH 07/20] chore(python): update release.sh to use keystore (#124) Source-Link: https://github.com/googleapis/synthtool/commit/69fda12e2994f0b595a397e8bb6e3e9f380524eb Post-Processor: 
gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 Co-authored-by: Owl Bot --- .github/.OwlBot.lock.yaml | 2 +- .kokoro/release.sh | 2 +- .kokoro/release/common.cfg | 12 +++++++++++- 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index ff5126c..eecb84c 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,3 @@ docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:dfa9b663b32de8b5b327e32c1da665a80de48876558dd58091d8160c60ad7355 + digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 diff --git a/.kokoro/release.sh b/.kokoro/release.sh index b69466e..9c8645e 100755 --- a/.kokoro/release.sh +++ b/.kokoro/release.sh @@ -26,7 +26,7 @@ python3 -m pip install --upgrade twine wheel setuptools export PYTHONUNBUFFERED=1 # Move into the package, build the distribution and upload. 
-TWINE_PASSWORD=$(cat "${KOKORO_GFILE_DIR}/secret_manager/google-cloud-pypi-token") +TWINE_PASSWORD=$(cat "${KOKORO_KEYSTORE_DIR}/73713_google-cloud-pypi-token-keystore-1") cd github/python-video-transcoder python3 setup.py sdist bdist_wheel twine upload --username __token__ --password "${TWINE_PASSWORD}" dist/* diff --git a/.kokoro/release/common.cfg b/.kokoro/release/common.cfg index 2456c30..de02a84 100644 --- a/.kokoro/release/common.cfg +++ b/.kokoro/release/common.cfg @@ -23,8 +23,18 @@ env_vars: { value: "github/python-video-transcoder/.kokoro/release.sh" } +# Fetch PyPI password +before_action { + fetch_keystore { + keystore_resource { + keystore_config_id: 73713 + keyname: "google-cloud-pypi-token-keystore-1" + } + } +} + # Tokens needed to report release status back to GitHub env_vars: { key: "SECRET_MANAGER_KEYS" - value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem,google-cloud-pypi-token" + value: "releasetool-publish-reporter-app,releasetool-publish-reporter-googleapis-installation,releasetool-publish-reporter-pem" } From 84404cab8206b928b912ee90f93de103975ff40f Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 20 Jan 2022 06:04:54 -0500 Subject: [PATCH 08/20] ci(python): run lint / unit tests / docs as GH actions (#126) * ci(python): run lint / unit tests / docs as GH actions Source-Link: https://github.com/googleapis/synthtool/commit/57be0cdb0b94e1669cee0ca38d790de1dfdbcd44 Post-Processor: gcr.io/cloud-devrel-public-resources/owlbot-python:latest@sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 * add a commit to activate gh actions Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- .github/.OwlBot.lock.yaml | 16 +++++++++- .github/workflows/docs.yml | 38 +++++++++++++++++++++++ .github/workflows/lint.yml | 25 +++++++++++++++ .github/workflows/unittest.yml | 57 
++++++++++++++++++++++++++++++++++ 4 files changed, 135 insertions(+), 1 deletion(-) create mode 100644 .github/workflows/docs.yml create mode 100644 .github/workflows/lint.yml create mode 100644 .github/workflows/unittest.yml diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index eecb84c..b668c04 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -1,3 +1,17 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:ae600f36b6bc972b368367b6f83a1d91ec2c82a4a116b383d67d547c56fe6de3 + digest: sha256:ed1f9983d5a935a89fe8085e8bb97d94e41015252c5b6c9771257cf8624367e6 + diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 0000000..f7b8344 --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,38 @@ +on: + pull_request: + branches: + - main +name: docs +jobs: + docs: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docs + run: | + nox -s docs + docfx: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + 
run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run docfx + run: | + nox -s docfx diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 0000000..1e8b05c --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,25 @@ +on: + pull_request: + branches: + - main +name: lint +jobs: + lint: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run lint + run: | + nox -s lint + - name: Run lint_setup_py + run: | + nox -s lint_setup_py diff --git a/.github/workflows/unittest.yml b/.github/workflows/unittest.yml new file mode 100644 index 0000000..074ee25 --- /dev/null +++ b/.github/workflows/unittest.yml @@ -0,0 +1,57 @@ +on: + pull_request: + branches: + - main +name: unittest +jobs: + unit: + runs-on: ubuntu-latest + strategy: + matrix: + python: ['3.6', '3.7', '3.8', '3.9', '3.10'] + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python }} + - name: Install nox + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install nox + - name: Run unit tests + env: + COVERAGE_FILE: .coverage-${{ matrix.python }} + run: | + nox -s unit-${{ matrix.python }} + - name: Upload coverage results + uses: actions/upload-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-${{ matrix.python }} + + cover: + runs-on: ubuntu-latest + needs: + - unit + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Python + uses: actions/setup-python@v2 + with: + python-version: "3.10" + - name: Install coverage + run: | + python -m pip install --upgrade setuptools pip wheel + python -m pip install coverage + - name: 
Download coverage results + uses: actions/download-artifact@v2 + with: + name: coverage-artifacts + path: .coverage-results/ + - name: Report coverage results + run: | + coverage combine .coverage-results/.coverage* + coverage report --show-missing --fail-under=100 From 66c58109be7fe29a9b43192466dec83d15be3320 Mon Sep 17 00:00:00 2001 From: Nicholas Cook Date: Mon, 24 Jan 2022 12:26:38 -0800 Subject: [PATCH 09/20] chore: move samples from GoogleCloudPlatform/python-docs-samples (#125) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * feat: move samples from GoogleCloudPlatform/python-docs-samples * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- README.rst | 2 + samples/snippets/README.md | 40 ++ samples/snippets/create_job_from_ad_hoc.py | 118 +++++ samples/snippets/create_job_from_preset.py | 82 ++++ samples/snippets/create_job_from_template.py | 86 ++++ samples/snippets/create_job_template.py | 115 +++++ .../create_job_with_animated_overlay.py | 148 +++++++ .../create_job_with_concatenated_inputs.py | 195 +++++++++ ...te_job_with_periodic_images_spritesheet.py | 128 ++++++ ..._job_with_set_number_images_spritesheet.py | 131 ++++++ .../create_job_with_static_overlay.py | 141 ++++++ samples/snippets/delete_job.py | 56 +++ samples/snippets/delete_job_template.py | 58 +++ samples/snippets/get_job.py | 57 +++ samples/snippets/get_job_state.py | 58 +++ samples/snippets/get_job_template.py | 58 +++ samples/snippets/job_template_test.py | 60 +++ samples/snippets/job_test.py | 410 ++++++++++++++++++ samples/snippets/list_job_templates.py | 59 +++ samples/snippets/list_jobs.py | 57 +++ samples/snippets/noxfile.py | 278 ++++++++++++ samples/snippets/requirements-test.txt | 3 + samples/snippets/requirements.txt | 3 + 23 files changed, 2343 insertions(+) create mode 100644 samples/snippets/README.md create mode 100644 
samples/snippets/create_job_from_ad_hoc.py create mode 100644 samples/snippets/create_job_from_preset.py create mode 100644 samples/snippets/create_job_from_template.py create mode 100644 samples/snippets/create_job_template.py create mode 100644 samples/snippets/create_job_with_animated_overlay.py create mode 100644 samples/snippets/create_job_with_concatenated_inputs.py create mode 100644 samples/snippets/create_job_with_periodic_images_spritesheet.py create mode 100644 samples/snippets/create_job_with_set_number_images_spritesheet.py create mode 100644 samples/snippets/create_job_with_static_overlay.py create mode 100644 samples/snippets/delete_job.py create mode 100644 samples/snippets/delete_job_template.py create mode 100644 samples/snippets/get_job.py create mode 100644 samples/snippets/get_job_state.py create mode 100644 samples/snippets/get_job_template.py create mode 100644 samples/snippets/job_template_test.py create mode 100644 samples/snippets/job_test.py create mode 100644 samples/snippets/list_job_templates.py create mode 100644 samples/snippets/list_jobs.py create mode 100644 samples/snippets/noxfile.py create mode 100644 samples/snippets/requirements-test.txt create mode 100644 samples/snippets/requirements.txt diff --git a/README.rst b/README.rst index e613c07..889cba7 100644 --- a/README.rst +++ b/README.rst @@ -68,9 +68,11 @@ Windows \Scripts\activate \Scripts\pip.exe install google-cloud-video-transcoder + Next Steps ~~~~~~~~~~ +- See the [Samples](./samples/snippets/README.md). - Read the `Client Library Documentation`_ for Cloud Transcoder API API to see other available methods on the client. - Read the `Transcoder API Product documentation`_ to learn diff --git a/samples/snippets/README.md b/samples/snippets/README.md new file mode 100644 index 0000000..ce4ff95 --- /dev/null +++ b/samples/snippets/README.md @@ -0,0 +1,40 @@ +# Transcoder API Python Samples + +This directory contains samples for the Transcoder API. 
Use this API to transcode videos into a variety of formats. The Transcoder API benefits broadcasters, production companies, businesses, and individuals looking to transform their video content for use across a variety of user devices. For more information, see the [Transcoder API documentation](https://cloud.google.com/transcoder/). + +## Setup + +To run the samples, you need to first follow the steps in [Before you begin](https://cloud.google.com/transcoder/docs/how-to/before-you-begin). + +For more information on authentication, refer to the +[Authentication Getting Started Guide](https://cloud.google.com/docs/authentication/getting-started). + +## Install Dependencies + +1. Clone python-docs-samples and change directory to the sample directory you want to use. + + $ git clone https://github.com/googleapis/python-video-transcoder.git + +1. Install [pip](https://pip.pypa.io/) and [virtualenv](https://virtualenv.pypa.io/) if you do not already have them. You may want to refer to the [Python Development Environment Setup Guide](https://cloud.google.com/python/setup) for Google Cloud Platform for instructions. + +1. Create a virtualenv. Samples are compatible with Python 3.6+. + + $ virtualenv env + $ source env/bin/activate + +1. Install the dependencies needed to run the samples. + + $ pip install -r requirements.txt + +## The client library + +This sample uses the [Google Cloud Client Library for Python](https://googlecloudplatform.github.io/google-cloud-python/). +You can read the documentation for more details on API usage and use GitHub +to [browse the source](https://github.com/GoogleCloudPlatform/google-cloud-python) and [report issues](https://github.com/GoogleCloudPlatform/google-cloud-python/issues). + +## Testing + +Make sure to enable the Transcoder API on the test project. 
Set the following environment variables: + +* `GOOGLE_CLOUD_PROJECT` +* `GOOGLE_CLOUD_PROJECT_NUMBER` diff --git a/samples/snippets/create_job_from_ad_hoc.py b/samples/snippets/create_job_from_ad_hoc.py new file mode 100644 index 0000000..2e08c61 --- /dev/null +++ b/samples/snippets/create_job_from_ad_hoc.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a supplied job config. + +Example usage: + python create_job_from_ad_hoc.py --project_id --location --input_uri --output_uri +""" + +# [START transcoder_create_job_from_ad_hoc] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): + """Creates a job based on an ad-hoc job configuration. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. 
+ output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="video-stream1", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=720, + width_pixels=1280, + bitrate_bps=2500000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + transcoder_v1.types.MuxStream( + key="hd", + container="mp4", + elementary_streams=["video-stream1", "audio-stream0"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_ad_hoc] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. 
Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_from_ad_hoc( + args.project_id, args.location, args.input_uri, args.output_uri, + ) diff --git a/samples/snippets/create_job_from_preset.py b/samples/snippets/create_job_from_preset.py new file mode 100644 index 0000000..3539b32 --- /dev/null +++ b/samples/snippets/create_job_from_preset.py @@ -0,0 +1,82 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a job preset. + +Example usage: + python create_job_from_preset.py --project_id --location --input_uri --output_uri [--preset ] +""" + +# [START transcoder_create_job_from_preset] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_preset(project_id, location, input_uri, output_uri, preset): + """Creates a job based on a job preset. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket. 
+ preset: The preset template (for example, 'preset/web-hd').""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.template_id = preset + + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_preset] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + parser.add_argument( + "--preset", + help="The preset template (for example, 'preset/web-hd').", + default="preset/web-hd", + ) + args = parser.parse_args() + create_job_from_preset( + args.project_id, args.location, args.input_uri, args.output_uri, args.preset, + ) diff --git a/samples/snippets/create_job_from_template.py b/samples/snippets/create_job_from_template.py new file mode 100644 index 0000000..0a69704 --- /dev/null +++ b/samples/snippets/create_job_from_template.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a job template. + +Example usage: + python create_job_from_template.py --project_id --location --input_uri --output_uri --template_id +""" + +# [START transcoder_create_job_from_template] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_template(project_id, location, input_uri, output_uri, template_id): + """Creates a job based on a job template. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket. + template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.template_id = template_id + + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + parser.add_argument( + "--template_id", + help="The job template ID. 
The template must be located in the same location as the job.", + required=True, + ) + args = parser.parse_args() + create_job_from_template( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + args.template_id, + ) diff --git a/samples/snippets/create_job_template.py b/samples/snippets/create_job_template.py new file mode 100644 index 0000000..95ed05d --- /dev/null +++ b/samples/snippets/create_job_template.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job template. + +Example usage: + python create_job_template.py --project_id [--location ] [--template_id ] +""" + +# [START transcoder_create_job_template] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_template(project_id, location, template_id): + """Creates a job template. + + Args: + project_id: The GCP project ID. + location: The location to store this template in. 
+ template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + + job_template = transcoder_v1.types.JobTemplate() + job_template.name = ( + f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + ) + job_template.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="video-stream1", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=720, + width_pixels=1280, + bitrate_bps=2500000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + transcoder_v1.types.MuxStream( + key="hd", + container="mp4", + elementary_streams=["video-stream1", "audio-stream0"], + ), + ], + ) + + response = client.create_job_template( + parent=parent, job_template=job_template, job_template_id=template_id + ) + print(f"Job template: {response.name}") + return response + + +# [END transcoder_create_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to store this template in.", + default="us-central1", + ) + parser.add_argument( + "--template_id", help="The job template ID.", default="my-job-template" + ) + args = parser.parse_args() + 
create_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/create_job_with_animated_overlay.py b/samples/snippets/create_job_with_animated_overlay.py new file mode 100644 index 0000000..a90c542 --- /dev/null +++ b/samples/snippets/create_job_with_animated_overlay.py @@ -0,0 +1,148 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a supplied job config that includes an animated overlay. + +Example usage: + python create_job_with_animated_overlay.py --project_id --location --input_uri --overlay_image_uri --output_uri +""" + +# [START transcoder_create_job_with_animated_overlay] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_animated_overlay( + project_id, location, input_uri, overlay_image_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that includes an animated image overlay. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + overlay_image_uri: Uri of the JPEG image for the overlay in the Cloud Storage bucket. Must be a JPEG. 
+ output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + overlays=[ + transcoder_v1.types.Overlay( + image=transcoder_v1.types.Overlay.Image( + uri=overlay_image_uri, + resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0, y=0, + ), + alpha=1, + ), + animations=[ + transcoder_v1.types.Overlay.Animation( + animation_fade=transcoder_v1.types.Overlay.AnimationFade( + fade_type=transcoder_v1.types.Overlay.FadeType.FADE_IN, + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0.5, y=0.5, + ), + start_time_offset=duration.Duration(seconds=5,), + end_time_offset=duration.Duration(seconds=10,), + ), + ), + transcoder_v1.types.Overlay.Animation( + animation_fade=transcoder_v1.types.Overlay.AnimationFade( + fade_type=transcoder_v1.types.Overlay.FadeType.FADE_OUT, + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0.5, y=0.5, + ), + start_time_offset=duration.Duration(seconds=12,), + end_time_offset=duration.Duration(seconds=15,), + ), + ), + ], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END 
transcoder_create_job_with_animated_overlay] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--overlay_image_uri", + help="Uri of the overlay JPEG image in the Cloud Storage bucket. Must be a JPEG.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_animated_overlay( + args.project_id, + args.location, + args.input_uri, + args.overlay_image_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_concatenated_inputs.py b/samples/snippets/create_job_with_concatenated_inputs.py new file mode 100644 index 0000000..0a2d3ad --- /dev/null +++ b/samples/snippets/create_job_with_concatenated_inputs.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on concatenating two input videos. 
+ +Example usage: + python create_job_with_concatenated_inputs.py --project_id --location \ + --input1_uri --start_time_input1 --end_time_input1 \ + --input2_uri --start_time_input2 --end_time_input2 \ + --output_uri +""" + +# [START transcoder_create_job_with_concatenated_inputs] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_concatenated_inputs( + project_id, + location, + input1_uri, + start_time_input1, + end_time_input1, + input2_uri, + start_time_input2, + end_time_input2, + output_uri, +): + """Creates a job based on an ad-hoc job configuration that concatenates two input videos. + + Args: + project_id (str): The GCP project ID. + location (str): The location to start the job in. + input1_uri (str): Uri of the first video in the Cloud Storage bucket. + start_time_input1 (str): Start time, in fractional seconds ending in 's' + (e.g., '0s'), relative to the first input video timeline. + end_time_input1 (str): End time, in fractional seconds ending in 's' + (e.g., '8.1s'), relative to the first input video timeline. + input2_uri (str): Uri of the second video in the Cloud Storage bucket. + start_time_input2 (str): Start time, in fractional seconds ending in 's' + (e.g., '3.5s'), relative to the second input video timeline. + end_time_input2 (str): End time, in fractional seconds ending in 's' + (e.g., '15s'), relative to the second input video timeline. 
+ output_uri (str): Uri of the video output folder in the Cloud Storage + bucket.""" + + s1 = duration.Duration() + s1.FromJsonString(start_time_input1) + e1 = duration.Duration() + e1.FromJsonString(end_time_input1) + + s2 = duration.Duration() + s2.FromJsonString(start_time_input2) + e2 = duration.Duration() + e2.FromJsonString(end_time_input2) + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + inputs=[ + transcoder_v1.types.Input(key="input1", uri=input1_uri,), + transcoder_v1.types.Input(key="input2", uri=input2_uri,), + ], + edit_list=[ + transcoder_v1.types.EditAtom( + key="atom1", + inputs=["input1"], + start_time_offset=s1, + end_time_offset=e1, + ), + transcoder_v1.types.EditAtom( + key="atom2", + inputs=["input2"], + start_time_offset=s2, + end_time_offset=e2, + ), + ], + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_concatenated_inputs] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + 
"--input1_uri", + help="Uri of the first video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--start_time_input1", + help="Start time, in fractional seconds ending in 's' (e.g., '1.1s'), " + + "relative to the first input video timeline. Use this field to trim " + + "content from the beginning of the first video.", + required=True, + ) + parser.add_argument( + "--end_time_input1", + help="End time, in fractional seconds ending in 's' (e.g., '9.5s'), " + + "relative to the first input video timeline. Use this field to trim " + + "content from the end of the first video.", + required=True, + ) + parser.add_argument( + "--input2_uri", + help="Uri of the second video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--start_time_input2", + help="Start time, in fractional seconds ending in 's' (e.g., '1.1s'), " + + "relative to the second input video timeline. Use this field to trim " + + "content from the beginning of the second video.", + required=True, + ) + parser.add_argument( + "--end_time_input2", + help="End time, in fractional seconds ending in 's' (e.g., '9.5s'), " + + "relative to the second input video timeline. Use this field to trim " + + "content from the end of the second video.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. 
" + + "Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_concatenated_inputs( + args.project_id, + args.location, + args.input1_uri, + args.start_time_input1, + args.end_time_input1, + args.input2_uri, + args.start_time_input2, + args.end_time_input2, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_periodic_images_spritesheet.py b/samples/snippets/create_job_with_periodic_images_spritesheet.py new file mode 100644 index 0000000..5028a27 --- /dev/null +++ b/samples/snippets/create_job_with_periodic_images_spritesheet.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job that generates two spritesheets from the input video. Each spritesheet contains images that are captured periodically. + +Example usage: + python create_job_with_periodic_images_spritesheet.py --project_id --location --input_uri --output_uri +""" + +# [START transcoder_create_job_with_periodic_images_spritesheet] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_periodic_images_spritesheet( + project_id, location, input_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that generates two spritesheets. 
+
+    Args:
+        project_id: The GCP project ID.
+        location: The location to start the job in.
+        input_uri: Uri of the video in the Cloud Storage bucket.
+        output_uri: Uri of the video output folder in the Cloud Storage bucket."""
+
+    client = TranscoderServiceClient()
+
+    parent = f"projects/{project_id}/locations/{location}"
+    job = transcoder_v1.types.Job()
+    job.input_uri = input_uri
+    job.output_uri = output_uri
+    job.config = transcoder_v1.types.JobConfig(
+        # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc.
+        # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig.
+        elementary_streams=[
+            # This section defines the output video stream.
+            transcoder_v1.types.ElementaryStream(
+                key="video-stream0",
+                video_stream=transcoder_v1.types.VideoStream(
+                    h264=transcoder_v1.types.VideoStream.H264CodecSettings(
+                        height_pixels=360,
+                        width_pixels=640,
+                        bitrate_bps=550000,
+                        frame_rate=60,
+                    ),
+                ),
+            ),
+            # This section defines the output audio stream.
+            transcoder_v1.types.ElementaryStream(
+                key="audio-stream0",
+                audio_stream=transcoder_v1.types.AudioStream(
+                    codec="aac", bitrate_bps=64000
+                ),
+            ),
+        ],
+        # This section multiplexes the output audio and video together into a container.
+        mux_streams=[
+            transcoder_v1.types.MuxStream(
+                key="sd",
+                container="mp4",
+                elementary_streams=["video-stream0", "audio-stream0"],
+            ),
+        ],
+        # Generate two sprite sheets from the input video into the GCS bucket. For more information, see
+        # https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_image_periodically.
+        sprite_sheets=[
+            # Generate a sprite sheet with 64x32px images. An image is taken every 7 seconds from the video.
+ transcoder_v1.types.SpriteSheet( + file_prefix="small-sprite-sheet", + sprite_width_pixels=64, + sprite_height_pixels=32, + interval=duration.Duration(seconds=7,), + ), + # Generate a sprite sheet with 128x72px images. An image is taken every 7 seconds from the video. + transcoder_v1.types.SpriteSheet( + file_prefix="large-sprite-sheet", + sprite_width_pixels=128, + sprite_height_pixels=72, + interval=duration.Duration(seconds=7,), + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_periodic_images_spritesheet] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_periodic_images_spritesheet( + args.project_id, args.location, args.input_uri, args.output_uri, + ) diff --git a/samples/snippets/create_job_with_set_number_images_spritesheet.py b/samples/snippets/create_job_with_set_number_images_spritesheet.py new file mode 100644 index 0000000..d416eec --- /dev/null +++ b/samples/snippets/create_job_with_set_number_images_spritesheet.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Transcoder sample for creating a job that generates two spritesheets from the input video. Each spritesheet contains a set number of images.
+
+Example usage:
+    python create_job_with_set_number_images_spritesheet.py --project_id <project-id> --location <location> --input_uri <uri> --output_uri <uri>
+"""
+
+# [START transcoder_create_job_with_set_number_images_spritesheet]
+
+import argparse
+
+from google.cloud.video import transcoder_v1
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+    TranscoderServiceClient,
+)
+
+
+def create_job_with_set_number_images_spritesheet(
+    project_id, location, input_uri, output_uri
+):
+    """Creates a job based on an ad-hoc job configuration that generates two spritesheets.
+
+    Args:
+        project_id: The GCP project ID.
+        location: The location to start the job in.
+        input_uri: Uri of the video in the Cloud Storage bucket.
+        output_uri: Uri of the video output folder in the Cloud Storage bucket."""
+
+    client = TranscoderServiceClient()
+
+    parent = f"projects/{project_id}/locations/{location}"
+    job = transcoder_v1.types.Job()
+    job.input_uri = input_uri
+    job.output_uri = output_uri
+    job.config = transcoder_v1.types.JobConfig(
+        # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc.
+        # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig.
+        elementary_streams=[
+            # This section defines the output video stream.
+ transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + # This section defines the output audio stream. + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + # This section multiplexes the output audio and video together into a container. + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + # Generate two sprite sheets from the input video into the GCS bucket. For more information, see + # https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_set_number_of_images. + sprite_sheets=[ + # Generate a 10x10 sprite sheet with 64x32px images. + transcoder_v1.types.SpriteSheet( + file_prefix="small-sprite-sheet", + sprite_width_pixels=64, + sprite_height_pixels=32, + column_count=10, + row_count=10, + total_count=100, + ), + # Generate a 10x10 sprite sheet with 128x72px images. 
+ transcoder_v1.types.SpriteSheet( + file_prefix="large-sprite-sheet", + sprite_width_pixels=128, + sprite_height_pixels=72, + column_count=10, + row_count=10, + total_count=100, + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_set_number_images_spritesheet] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_set_number_images_spritesheet( + args.project_id, args.location, args.input_uri, args.output_uri, + ) diff --git a/samples/snippets/create_job_with_static_overlay.py b/samples/snippets/create_job_with_static_overlay.py new file mode 100644 index 0000000..5386a8a --- /dev/null +++ b/samples/snippets/create_job_with_static_overlay.py @@ -0,0 +1,141 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Google Cloud Transcoder sample for creating a job based on a supplied job config that includes a static overlay. + +Example usage: + python create_job_with_static_overlay.py --project_id --location --input_uri --overlay_image_uri --output_uri +""" + +# [START transcoder_create_job_with_static_overlay] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_static_overlay( + project_id, location, input_uri, overlay_image_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that includes a static image overlay. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + overlay_image_uri: Uri of the JPEG image for the overlay in the Cloud Storage bucket. Must be a JPEG. + output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + overlays=[ + transcoder_v1.types.Overlay( + image=transcoder_v1.types.Overlay.Image( + 
uri=overlay_image_uri, + resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=1, y=0.5, + ), + alpha=1, + ), + animations=[ + transcoder_v1.types.Overlay.Animation( + animation_static=transcoder_v1.types.Overlay.AnimationStatic( + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0, y=0, + ), + start_time_offset=duration.Duration(seconds=0,), + ), + ), + transcoder_v1.types.Overlay.Animation( + animation_end=transcoder_v1.types.Overlay.AnimationEnd( + start_time_offset=duration.Duration(seconds=10,), + ), + ), + ], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_static_overlay] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location to start this job in.", default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--overlay_image_uri", + help="Uri of the overlay JPEG image in the Cloud Storage bucket. Must be a JPEG.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_static_overlay( + args.project_id, + args.location, + args.input_uri, + args.overlay_image_uri, + args.output_uri, + ) diff --git a/samples/snippets/delete_job.py b/samples/snippets/delete_job.py new file mode 100644 index 0000000..5f139f9 --- /dev/null +++ b/samples/snippets/delete_job.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Transcoder sample for deleting a job.
+
+Example usage:
+    python delete_job.py --project_id <project-id> --location <location> --job_id <job-id>
+"""
+
+# [START transcoder_delete_job]
+
+import argparse
+
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+    TranscoderServiceClient,
+)
+
+
+def delete_job(project_id, location, job_id):
+    """Deletes a job.
+
+    Args:
+        project_id: The GCP project ID.
+        location: The location this job is in.
+        job_id: The job ID."""
+
+    client = TranscoderServiceClient()
+
+    name = f"projects/{project_id}/locations/{location}/jobs/{job_id}"
+    response = client.delete_job(name=name)
+    print("Deleted job")
+    return response
+
+
+# [END transcoder_delete_job]
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--project_id", help="Your Cloud project ID.", required=True)
+    parser.add_argument("--location", help="The location of the job.", required=True)
+    parser.add_argument("--job_id", help="The job ID.", required=True)
+    args = parser.parse_args()
+    delete_job(args.project_id, args.location, args.job_id)
diff --git a/samples/snippets/delete_job_template.py b/samples/snippets/delete_job_template.py
new file mode 100644
index 0000000..ccce70f
--- /dev/null
+++ b/samples/snippets/delete_job_template.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for deleting a job template. + +Example usage: + python delete_job_template.py --project_id --location --template_id +""" + +# [START transcoder_delete_job_template] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def delete_job_template(project_id, location, template_id): + """Deletes a job template. + + Args: + project_id: The GCP project ID. + location: The location of the template. + template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + response = client.delete_job_template(name=name) + print("Deleted job template") + return response + + +# [END transcoder_delete_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the template.", required=True + ) + parser.add_argument("--template_id", help="The job template ID.", required=True) + args = parser.parse_args() + delete_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/get_job.py b/samples/snippets/get_job.py new file mode 100644 index 0000000..ec5d7f1 --- /dev/null +++ b/samples/snippets/get_job.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting the details for a job. + +Example usage: + python get_job.py --project_id --location --job_id +""" + +# [START transcoder_get_job] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job(project_id, location, job_id): + """Gets a job. + + Args: + project_id: The GCP project ID. + location: The location this job is in. + job_id: The job ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobs/{job_id}" + response = client.get_job(name=name) + print(f"Job: {response.name}") + return response + + +# [END transcoder_get_job] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the job.", required=True) + parser.add_argument("--job_id", help="The job ID.", required=True) + args = parser.parse_args() + get_job(args.project_id, args.location, args.job_id) diff --git a/samples/snippets/get_job_state.py b/samples/snippets/get_job_state.py new file mode 100644 index 0000000..6b73acf --- /dev/null +++ b/samples/snippets/get_job_state.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting the state for a job. + +Example usage: + python get_job_state.py --project_id --location --job_id +""" + +# [START transcoder_get_job_state] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job_state(project_id, location, job_id): + """Gets a job's state. + + Args: + project_id: The GCP project ID. + location: The location this job is in. + job_id: The job ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobs/{job_id}" + response = client.get_job(name=name) + + print(f"Job state: {str(response.state)}") + return response + + +# [END transcoder_get_job_state] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the job.", required=True) + parser.add_argument("--job_id", help="The job ID.", required=True) + args = parser.parse_args() + get_job_state(args.project_id, args.location, args.job_id) diff --git a/samples/snippets/get_job_template.py b/samples/snippets/get_job_template.py new file mode 100644 index 0000000..4d6ccf7 --- /dev/null +++ b/samples/snippets/get_job_template.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting a job template. + +Example usage: + python get_job_template.py --project_id --location --template_id +""" + +# [START transcoder_get_job_template] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job_template(project_id, location, template_id): + """Gets a job template. + + Args: + project_id: The GCP project ID. + location: The location of the template. 
+ template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + response = client.get_job_template(name=name) + print(f"Job template: {response.name}") + return response + + +# [END transcoder_get_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the template.", required=True + ) + parser.add_argument("--template_id", help="The job template ID.", required=True) + args = parser.parse_args() + get_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/job_template_test.py b/samples/snippets/job_template_test.py new file mode 100644 index 0000000..259595a --- /dev/null +++ b/samples/snippets/job_template_test.py @@ -0,0 +1,60 @@ +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +from google.api_core.exceptions import NotFound + +import create_job_template +import delete_job_template +import get_job_template +import list_job_templates + +location = "us-central1" +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +template_id = f"my-python-test-template-{uuid.uuid4()}" + + +def test_template_operations(capsys): + + # Enable the following API on the test project: + # * Transcoder API + + job_template_name = ( + f"projects/{project_number}/locations/{location}/jobTemplates/{template_id}" + ) + + try: + delete_job_template.delete_job_template(project_id, location, template_id) + except NotFound as e: + print(f"Ignoring NotFound, details: {e}") + out, _ = capsys.readouterr() + + create_job_template.create_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + get_job_template.get_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + list_job_templates.list_job_templates(project_id, location) + out, _ = capsys.readouterr() + assert job_template_name in out + + delete_job_template.delete_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert "Deleted job template" in out diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py new file mode 100644 index 0000000..2d9a6ba --- /dev/null +++ b/samples/snippets/job_test.py @@ -0,0 +1,410 @@ +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import time +import uuid + +import backoff +from google.cloud import storage +from googleapiclient.errors import HttpError +import pytest + +import create_job_from_ad_hoc +import create_job_from_preset +import create_job_from_template +import create_job_template +import create_job_with_animated_overlay +import create_job_with_concatenated_inputs +import create_job_with_periodic_images_spritesheet +import create_job_with_set_number_images_spritesheet +import create_job_with_static_overlay +import delete_job +import delete_job_template +import get_job +import get_job_state +import list_jobs + +location = "us-central1" +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +template_id = f"my-python-test-template-{uuid.uuid4()}" + +input_bucket_name = "cloud-samples-data/media/" +output_bucket_name = f"python-samples-transcoder-{uuid.uuid4()}" +test_video_file_name = "ChromeCast.mp4" +test_overlay_image_file_name = "overlay.jpg" +test_concat1_file_name = "ForBiggerEscapes.mp4" +test_concat2_file_name = "ForBiggerJoyrides.mp4" + +input_uri = f"gs://{input_bucket_name}{test_video_file_name}" +overlay_image_uri = f"gs://{input_bucket_name}{test_overlay_image_file_name}" +concat1_uri = f"gs://{input_bucket_name}{test_concat1_file_name}" +concat2_uri = f"gs://{input_bucket_name}{test_concat2_file_name}" +output_uri_for_preset = f"gs://{output_bucket_name}/test-output-preset/" +output_uri_for_template = f"gs://{output_bucket_name}/test-output-template/" +output_uri_for_adhoc = 
f"gs://{output_bucket_name}/test-output-adhoc/" +output_uri_for_static_overlay = f"gs://{output_bucket_name}/test-output-static-overlay/" +output_uri_for_animated_overlay = ( + f"gs://{output_bucket_name}/test-output-animated-overlay/" +) +small_spritesheet_file_prefix = "small-sprite-sheet" +large_spritesheet_file_prefix = "large-sprite-sheet" +spritesheet_file_suffix = "0000000000.jpeg" + +output_dir_for_set_number_spritesheet = "test-output-set-number-spritesheet/" +output_uri_for_set_number_spritesheet = ( + f"gs://{output_bucket_name}/{output_dir_for_set_number_spritesheet}" +) +output_dir_for_periodic_spritesheet = "test-output-periodic-spritesheet/" +output_uri_for_periodic_spritesheet = ( + f"gs://{output_bucket_name}/{output_dir_for_periodic_spritesheet}" +) +output_uri_for_concat = f"gs://{output_bucket_name}/test-output-concat/" + +preset = "preset/web-hd" +job_succeeded_state = "ProcessingState.SUCCEEDED" + + +@pytest.fixture(scope="module") +def test_bucket(): + storage_client = storage.Client() + bucket = storage_client.create_bucket(output_bucket_name) + + yield bucket + bucket.delete(force=True) + + +def test_create_job_from_preset(capsys, test_bucket): + create_job_from_preset.create_job_from_preset( + project_id, location, input_uri, output_uri_for_preset, preset + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + 
assert "Deleted job" in out + + +def test_create_job_from_template(capsys, test_bucket): + + job_template_name = ( + f"projects/{project_number}/locations/{location}/jobTemplates/{template_id}" + ) + + create_job_template.create_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + create_job_from_template.create_job_from_template( + project_id, location, input_uri, output_uri_for_template, template_id + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + delete_job_template.delete_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert "Deleted job template" in out + + +def test_create_job_from_ad_hoc(capsys, test_bucket): + create_job_from_ad_hoc.create_job_from_ad_hoc( + project_id, location, input_uri, output_uri_for_adhoc + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, 
job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_static_overlay(capsys, test_bucket): + create_job_with_static_overlay.create_job_with_static_overlay( + project_id, + location, + input_uri, + overlay_image_uri, + output_uri_for_static_overlay, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_animated_overlay(capsys, test_bucket): + create_job_with_animated_overlay.create_job_with_animated_overlay( + project_id, + location, + input_uri, + overlay_image_uri, + output_uri_for_animated_overlay, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert 
job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_set_number_spritesheet(capsys, test_bucket): + create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( + project_id, location, input_uri, output_uri_for_set_number_spritesheet, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert ( + job_name in out + ) # Get the job name so you can use it later to get the job and delete the job. + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. 
+ + _assert_job_state_succeeded(capsys, job_id) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_set_number_spritesheet + + small_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_set_number_spritesheet + + large_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_periodic_spritesheet(capsys, test_bucket): + create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( + project_id, location, input_uri, output_uri_for_periodic_spritesheet, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert ( + job_name in out + ) # Get the job name so you can use it later to get the job and delete the job. + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. 
+ + _assert_job_state_succeeded(capsys, job_id) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_periodic_spritesheet + + small_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_periodic_spritesheet + + large_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_concatenated_inputs(capsys, test_bucket): + create_job_with_concatenated_inputs.create_job_with_concatenated_inputs( + project_id, + location, + concat1_uri, + "0s", + "8.1s", + concat2_uri, + "3.5s", + "15s", + output_uri_for_concat, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +# Retrying up to 10 mins. 
+@backoff.on_exception(backoff.expo, AssertionError, max_time=600) +def _assert_job_state_succeeded(capsys, job_id): + try: + get_job_state.get_job_state(project_id, location, job_id) + except HttpError as err: + raise AssertionError(f"Could not get job state: {err.resp.status}") + + out, _ = capsys.readouterr() + assert job_succeeded_state in out + + +def _assert_file_in_bucket(capsys, test_bucket, directory_and_filename): + blob = test_bucket.blob(directory_and_filename) + assert blob.exists() diff --git a/samples/snippets/list_job_templates.py b/samples/snippets/list_job_templates.py new file mode 100644 index 0000000..020f7a3 --- /dev/null +++ b/samples/snippets/list_job_templates.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for listing job templates in a location. + +Example usage: + python list_job_templates.py --project_id --location +""" + +# [START transcoder_list_job_templates] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def list_job_templates(project_id, location): + """Lists all job templates in a location. + + Args: + project_id: The GCP project ID. 
+ location: The location of the templates.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + response = client.list_job_templates(parent=parent) + print("Job templates:") + for jobTemplate in response.job_templates: + print(jobTemplate.name) + + return response + + +# [END transcoder_list_job_templates] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the templates.", required=True + ) + args = parser.parse_args() + list_job_templates(args.project_id, args.location) diff --git a/samples/snippets/list_jobs.py b/samples/snippets/list_jobs.py new file mode 100644 index 0000000..cf1fdbd --- /dev/null +++ b/samples/snippets/list_jobs.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for listing jobs in a location. + +Example usage: + python list_jobs.py --project_id --location +""" + +# [START transcoder_list_jobs] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def list_jobs(project_id, location): + """Lists all jobs in a location. + + Args: + project_id: The GCP project ID. 
+ location: The location of the jobs.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + response = client.list_jobs(parent=parent) + print("Jobs:") + for job in response.jobs: + print(job.name) + + return response + + +# [END transcoder_list_jobs] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the jobs.", required=True) + args = parser.parse_args() + list_jobs(args.project_id, args.location) diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py new file mode 100644 index 0000000..3bbef5d --- /dev/null +++ b/samples/snippets/noxfile.py @@ -0,0 +1,278 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==19.10b0" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. 
The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. + sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. 
+IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. +# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def 
blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. 
+ # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. + p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt new file mode 100644 index 0000000..52c6682 --- /dev/null +++ b/samples/snippets/requirements-test.txt @@ -0,0 +1,3 @@ +backoff==1.11.1 +google-cloud-storage==1.43.0 +pytest==6.2.4 diff --git a/samples/snippets/requirements.txt 
b/samples/snippets/requirements.txt new file mode 100644 index 0000000..7011d0b --- /dev/null +++ b/samples/snippets/requirements.txt @@ -0,0 +1,3 @@ +google-api-python-client==2.34.0 +grpcio==1.43.0 +google-cloud-video-transcoder==1.2.1 From 9e91383c2f9f20dcbb77b56a5c1e3159f0275ca4 Mon Sep 17 00:00:00 2001 From: nicain Date: Mon, 24 Jan 2022 13:52:20 -0800 Subject: [PATCH 10/20] chore: set cdpe-cloudai as codeowner (#128) --- .github/CODEOWNERS | 8 ++++---- .repo-metadata.json | 2 +- samples/snippets/noxfile.py | 1 + 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index e446644..62aced9 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -5,8 +5,8 @@ # https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners#codeowners-syntax # Note: This file is autogenerated. To make changes to the codeowner team, please update .repo-metadata.json. -# @googleapis/yoshi-python is the default owner for changes in this repo -* @googleapis/yoshi-python +# @googleapis/yoshi-python @googleapis/cdpe-cloudai are the default owners for changes in this repo +* @googleapis/yoshi-python @googleapis/cdpe-cloudai -# @googleapis/python-samples-reviewers is the default owner for samples changes -/samples/ @googleapis/python-samples-reviewers +# @googleapis/python-samples-reviewers @googleapis/cdpe-cloudai are the default owners for samples changes +/samples/ @googleapis/python-samples-reviewers @googleapis/cdpe-cloudai diff --git a/.repo-metadata.json b/.repo-metadata.json index 3e20e91..afb76d0 100644 --- a/.repo-metadata.json +++ b/.repo-metadata.json @@ -11,6 +11,6 @@ "distribution_name": "google-cloud-video-transcoder", "api_id": "transcoder.googleapis.com", "default_version": "v1", - "codeowner_team": "", + "codeowner_team": "@googleapis/cdpe-cloudai", "api_shortname": "transcoder" } diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 3bbef5d..20cdfc6 100644 --- 
a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -187,6 +187,7 @@ def _session_tests( ) -> None: # check for presence of tests test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + test_list.extend(glob.glob("tests")) if len(test_list) == 0: print("No tests found, skipping directory.") else: From 0ae071f61c193655445d94e86480b7a9028260f6 Mon Sep 17 00:00:00 2001 From: Nicholas Cook Date: Mon, 24 Jan 2022 14:23:23 -0800 Subject: [PATCH 11/20] chore(cleanup): broken link in README (#130) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: broken link in README * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- README.rst | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/README.rst b/README.rst index 889cba7..0345e82 100644 --- a/README.rst +++ b/README.rst @@ -72,7 +72,7 @@ Windows Next Steps ~~~~~~~~~~ -- See the [Samples](./samples/snippets/README.md). +- See the `Samples`_. - Read the `Client Library Documentation`_ for Cloud Transcoder API API to see other available methods on the client. - Read the `Transcoder API Product documentation`_ to learn @@ -80,5 +80,6 @@ Next Steps - View this `README`_ to see the full list of Cloud APIs that we cover. +.. _Samples: https://github.com/googleapis/python-video-transcoder/blob/main/samples/snippets/README.md .. _Transcoder API Product documentation: https://cloud.google.com/transcoder/docs -.. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst \ No newline at end of file +.. 
_README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst From e30431fec7c15666afbb5bc975f7077389aac06d Mon Sep 17 00:00:00 2001 From: Nicholas Cook Date: Fri, 4 Feb 2022 10:47:25 -0800 Subject: [PATCH 12/20] feat: add samples and tests for adding captions to a job (#131) * feat: add samples and tests for adding captions to a job * remove space * remove extraneous explanation --- README.rst | 2 +- .../create_job_with_embedded_captions.py | 195 ++++++++++++++++++ .../create_job_with_standalone_captions.py | 192 +++++++++++++++++ samples/snippets/job_test.py | 113 +++++++++- 4 files changed, 495 insertions(+), 7 deletions(-) create mode 100644 samples/snippets/create_job_with_embedded_captions.py create mode 100644 samples/snippets/create_job_with_standalone_captions.py diff --git a/README.rst b/README.rst index 0345e82..87242e1 100644 --- a/README.rst +++ b/README.rst @@ -81,5 +81,5 @@ Next Steps APIs that we cover. .. _Samples: https://github.com/googleapis/python-video-transcoder/blob/main/samples/snippets/README.md -.. _Transcoder API Product documentation: https://cloud.google.com/transcoder/docs +.. _Transcoder API Product documentation: https://cloud.google.com/transcoder/docs .. _README: https://github.com/googleapis/google-cloud-python/blob/main/README.rst diff --git a/samples/snippets/create_job_with_embedded_captions.py b/samples/snippets/create_job_with_embedded_captions.py new file mode 100644 index 0000000..31e4216 --- /dev/null +++ b/samples/snippets/create_job_with_embedded_captions.py @@ -0,0 +1,195 @@ +#!/usr/bin/env python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job that embeds captions in the output video. + +Example usage: + python create_job_with_embedded_captions.py --project_id --location \ + --input_video_uri --input_captions_uri --output_uri +""" + +# [START transcoder_create_job_with_embedded_captions] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_with_embedded_captions( + project_id, + location, + input_video_uri, + input_captions_uri, + output_uri, +): + """Creates a job based on an ad-hoc job configuration that embeds captions in the output video. + + Args: + project_id (str): The GCP project ID. + location (str): The location to start the job in. + input_video_uri (str): Uri of the input video in the Cloud Storage + bucket. + input_captions_uri (str): Uri of the input captions file in the Cloud + Storage bucket. 
+ output_uri (str): Uri of the video output folder in the Cloud Storage + bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + inputs=[ + transcoder_v1.types.Input( + key="input0", + uri=input_video_uri, + ), + transcoder_v1.types.Input( + key="caption-input0", + uri=input_captions_uri, + ), + ], + edit_list=[ + transcoder_v1.types.EditAtom( + key="atom0", + inputs=["input0", "caption-input0"], + ), + ], + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + transcoder_v1.types.ElementaryStream( + key="cea-stream0", + # The following doesn't work because "mapping" is a reserved + # argument name in GCP python client libraries (see + # https://github.com/googleapis/proto-plus-python/blob/main/proto/message.py#L447): + # + # text_stream=transcoder_v1.types.TextStream( + # codec="cea608", + # mapping=[ + # transcoder_v1.types.TextStream.TextMapping( + # atom_key="atom0", + # input_key="caption-input0", + # input_track=0, + # ), + # ], + # ), + # Use a python dictionary as a workaround: + text_stream={ + "codec": "cea608", + "mapping": [ + { + "atom_key": "atom0", + "input_key": "caption-input0", + "input_track": 0, + } + ], + }, + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + transcoder_v1.types.MuxStream( + key="sd-hls", + container="ts", + elementary_streams=["video-stream0", "audio-stream0"], + ), + 
transcoder_v1.types.MuxStream( + key="sd-dash", + container="fmp4", + elementary_streams=["video-stream0"], + ), + transcoder_v1.types.MuxStream( + key="audio-dash", + container="fmp4", + elementary_streams=["audio-stream0"], + ), + ], + manifests=[ + transcoder_v1.types.Manifest( + file_name="manifest.m3u8", + type_="HLS", + mux_streams=["sd-hls"], + ), + transcoder_v1.types.Manifest( + file_name="manifest.mpd", + type_="DASH", + mux_streams=["sd-dash", "audio-dash"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_embedded_captions] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_video_uri", + help="Uri of the input video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--input_captions_uri", + help="Uri of the input captions file in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. " + + "Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_embedded_captions( + args.project_id, + args.location, + args.input_video_uri, + args.input_captions_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_standalone_captions.py b/samples/snippets/create_job_with_standalone_captions.py new file mode 100644 index 0000000..5ed5467 --- /dev/null +++ b/samples/snippets/create_job_with_standalone_captions.py @@ -0,0 +1,192 @@ +#!/usr/bin/env python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job that can use captions from a standalone file. + +Example usage: + python create_job_with_standalone_captions.py --project_id --location \ + --input_video_uri --input_captions_uri --output_uri +""" + +# [START transcoder_create_job_with_standalone_captions] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_standalone_captions( + project_id, + location, + input_video_uri, + input_captions_uri, + output_uri, +): + """Creates a job based on an ad-hoc job configuration that can use captions from a standalone file. + + Args: + project_id (str): The GCP project ID. + location (str): The location to start the job in. + input_video_uri (str): Uri of the input video in the Cloud Storage + bucket. + input_captions_uri (str): Uri of the input captions file in the Cloud + Storage bucket. 
+ output_uri (str): Uri of the video output folder in the Cloud Storage + bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + inputs=[ + transcoder_v1.types.Input( + key="input0", + uri=input_video_uri, + ), + transcoder_v1.types.Input( + key="caption-input0", + uri=input_captions_uri, + ), + ], + edit_list=[ + transcoder_v1.types.EditAtom( + key="atom0", + inputs=["input0", "caption-input0"], + ), + ], + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + transcoder_v1.types.ElementaryStream( + key="vtt-stream0", + # The following doesn't work because "mapping" is a reserved + # argument name in GCP python client libraries (see + # https://github.com/googleapis/proto-plus-python/blob/main/proto/message.py#L447): + # + # text_stream=transcoder_v1.types.TextStream( + # codec="webvtt", + # mapping=[ + # transcoder_v1.types.TextStream.TextMapping( + # atom_key="atom0", + # input_key="caption-input0", + # input_track=0, + # ), + # ], + # ), + # Use a python dictionary as a workaround: + text_stream={ + "codec": "webvtt", + "mapping": [ + { + "atom_key": "atom0", + "input_key": "caption-input0", + "input_track": 0, + } + ], + }, + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd-hls-fmp4", + container="fmp4", + elementary_streams=["video-stream0"], + ), + transcoder_v1.types.MuxStream( + key="audio-hls-fmp4", + container="fmp4", + elementary_streams=["audio-stream0"], + ), + transcoder_v1.types.MuxStream( 
+ key="text-vtt", + container="vtt", + elementary_streams=["vtt-stream0"], + segment_settings=transcoder_v1.types.SegmentSettings( + segment_duration=duration.Duration( + seconds=6, + ), + individual_segments=True, + ), + ), + ], + manifests=[ + transcoder_v1.types.Manifest( + file_name="manifest.m3u8", + type_="HLS", + mux_streams=["sd-hls-fmp4", "audio-hls-fmp4", "text-vtt"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_standalone_captions] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_video_uri", + help="Uri of the input video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--input_captions_uri", + help="Uri of the input captions file in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. 
" + + "Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_standalone_captions( + args.project_id, + args.location, + args.input_video_uri, + args.input_captions_uri, + args.output_uri, + ) diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py index 2d9a6ba..fa9a93e 100644 --- a/samples/snippets/job_test.py +++ b/samples/snippets/job_test.py @@ -27,8 +27,10 @@ import create_job_template import create_job_with_animated_overlay import create_job_with_concatenated_inputs +import create_job_with_embedded_captions import create_job_with_periodic_images_spritesheet import create_job_with_set_number_images_spritesheet +import create_job_with_standalone_captions import create_job_with_static_overlay import delete_job import delete_job_template @@ -47,11 +49,13 @@ test_overlay_image_file_name = "overlay.jpg" test_concat1_file_name = "ForBiggerEscapes.mp4" test_concat2_file_name = "ForBiggerJoyrides.mp4" +test_captions_file_name = "caption.srt" input_uri = f"gs://{input_bucket_name}{test_video_file_name}" overlay_image_uri = f"gs://{input_bucket_name}{test_overlay_image_file_name}" concat1_uri = f"gs://{input_bucket_name}{test_concat1_file_name}" concat2_uri = f"gs://{input_bucket_name}{test_concat2_file_name}" +captions_uri = f"gs://{input_bucket_name}{test_captions_file_name}" output_uri_for_preset = f"gs://{output_bucket_name}/test-output-preset/" output_uri_for_template = f"gs://{output_bucket_name}/test-output-template/" output_uri_for_adhoc = f"gs://{output_bucket_name}/test-output-adhoc/" @@ -59,6 +63,9 @@ output_uri_for_animated_overlay = ( f"gs://{output_bucket_name}/test-output-animated-overlay/" ) +output_uri_for_embedded_captions = f"gs://{output_bucket_name}/test-output-embedded-captions/" +output_uri_for_standalone_captions = f"gs://{output_bucket_name}/test-output-standalone-captions/" + small_spritesheet_file_prefix = "small-sprite-sheet" large_spritesheet_file_prefix = "large-sprite-sheet" 
spritesheet_file_suffix = "0000000000.jpeg" @@ -75,6 +82,7 @@ preset = "preset/web-hd" job_succeeded_state = "ProcessingState.SUCCEEDED" +job_running_state = "ProcessingState.RUNNING" @pytest.fixture(scope="module") @@ -105,7 +113,7 @@ def test_create_job_from_preset(capsys, test_bucket): time.sleep(30) - _assert_job_state_succeeded(capsys, job_id) + _assert_job_state_succeeded_or_running(capsys, job_id) list_jobs.list_jobs(project_id, location) out, _ = capsys.readouterr() @@ -144,7 +152,7 @@ def test_create_job_from_template(capsys, test_bucket): time.sleep(30) - _assert_job_state_succeeded(capsys, job_id) + _assert_job_state_succeeded_or_running(capsys, job_id) list_jobs.list_jobs(project_id, location) out, _ = capsys.readouterr() @@ -178,7 +186,7 @@ def test_create_job_from_ad_hoc(capsys, test_bucket): time.sleep(30) - _assert_job_state_succeeded(capsys, job_id) + _assert_job_state_succeeded_or_running(capsys, job_id) list_jobs.list_jobs(project_id, location) out, _ = capsys.readouterr() @@ -259,7 +267,10 @@ def test_create_job_with_animated_overlay(capsys, test_bucket): def test_create_job_with_set_number_spritesheet(capsys, test_bucket): create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( - project_id, location, input_uri, output_uri_for_set_number_spritesheet, + project_id, + location, + input_uri, + output_uri_for_set_number_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -307,7 +318,10 @@ def test_create_job_with_set_number_spritesheet(capsys, test_bucket): def test_create_job_with_periodic_spritesheet(capsys, test_bucket): create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( - project_id, location, input_uri, output_uri_for_periodic_spritesheet, + project_id, + location, + input_uri, + output_uri_for_periodic_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = 
f"projects/{project_number}/locations/{location}/jobs/" @@ -393,7 +407,80 @@ def test_create_job_with_concatenated_inputs(capsys, test_bucket): assert "Deleted job" in out -# Retrying up to 10 mins. +def test_create_job_with_embedded_captions(capsys, test_bucket): + create_job_with_embedded_captions.create_job_with_embedded_captions( + project_id, + location, + input_uri, + captions_uri, + output_uri_for_embedded_captions, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_standalone_captions(capsys, test_bucket): + create_job_with_standalone_captions.create_job_with_standalone_captions( + project_id, + location, + input_uri, + captions_uri, + output_uri_for_standalone_captions, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. 
Once the job completes, check the job state. + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +# Retrying up to 10 mins. This function checks if the job completed +# successfully. @backoff.on_exception(backoff.expo, AssertionError, max_time=600) def _assert_job_state_succeeded(capsys, job_id): try: @@ -405,6 +492,20 @@ def _assert_job_state_succeeded(capsys, job_id): assert job_succeeded_state in out +# Retrying up to 10 mins. This function checks if the job is running or has +# completed. Both of these conditions signal the API is functioning. The test +# can list or delete a job that is running or completed with no ill effects. +@backoff.on_exception(backoff.expo, AssertionError, max_time=600) +def _assert_job_state_succeeded_or_running(capsys, job_id): + try: + get_job_state.get_job_state(project_id, location, job_id) + except HttpError as err: + raise AssertionError(f"Could not get job state: {err.resp.status}") + + out, _ = capsys.readouterr() + assert (job_succeeded_state in out) or (job_running_state in out) + + def _assert_file_in_bucket(capsys, test_bucket, directory_and_filename): blob = test_bucket.blob(directory_and_filename) assert blob.exists() From c14695bc9070ec64a890c8f81af382165f5d04ea Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Mon, 14 Feb 2022 20:49:17 -0500 Subject: [PATCH 13/20] fix: rename mapping attribute of AudioStream to mapping_ (#121) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fix: resolve issue where mapping attribute of AudioStream could not be set fix(deps): require proto-plus >= 1.20.1 * chore: use gapic-generator-python 0.58.4 committer: dovs PiperOrigin-RevId: 419025932 Source-Link: 
https://github.com/googleapis/googleapis/commit/73da6697f598f1ba30618924936a59f8e457ec89 Source-Link: https://github.com/googleapis/googleapis-gen/commit/46df624a54b9ed47c1a7eefb7a49413cf7b82f98 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNDZkZjYyNGE1NGI5ZWQ0N2MxYTdlZWZiN2E0OTQxM2NmN2I4MmY5OCJ9 * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md * fix(deps): require proto-plus >= 1.20.1 * ensure proto-plus>=1.20.1 is used for testing Co-authored-by: Owl Bot Co-authored-by: Dov Shlachter Co-authored-by: Anthonios Partheniou --- .../transcoder_service/transports/base.py | 1 - .../video/transcoder_v1/types/resources.py | 8 +- .../transcoder_service/transports/base.py | 1 - .../transcoder_v1beta1/types/resources.py | 8 +- .../create_job_with_embedded_captions.py | 31 ++---- .../create_job_with_standalone_captions.py | 27 ++--- samples/snippets/job_test.py | 24 ++--- samples/snippets/requirements-test.txt | 1 + setup.py | 6 +- testing/constraints-3.6.txt | 2 +- .../transcoder_v1/test_transcoder_service.py | 102 +++++++----------- .../test_transcoder_service.py | 102 +++++++----------- 12 files changed, 115 insertions(+), 198 deletions(-) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py index 9c9f3cf..85cff1e 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py @@ -103,7 +103,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - 
elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/video/transcoder_v1/types/resources.py b/google/cloud/video/transcoder_v1/types/resources.py index cb02c1a..3108526 100644 --- a/google/cloud/video/transcoder_v1/types/resources.py +++ b/google/cloud/video/transcoder_v1/types/resources.py @@ -1345,7 +1345,7 @@ class AudioStream(proto.Message): - ``sr`` - Side right channel - ``fc`` - Front center channel - ``lfe`` - Low frequency - mapping (Sequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): + mapping_ (Sequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): The mapping for the ``Job.edit_list`` atoms with audio ``EditAtom.inputs``. sample_rate_hertz (int): @@ -1389,7 +1389,7 @@ class AudioMapping(proto.Message): bitrate_bps = proto.Field(proto.INT32, number=2,) channel_count = proto.Field(proto.INT32, number=3,) channel_layout = proto.RepeatedField(proto.STRING, number=4,) - mapping = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioMapping,) + mapping_ = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioMapping,) sample_rate_hertz = proto.Field(proto.INT32, number=6,) @@ -1408,7 +1408,7 @@ class TextStream(proto.Message): - ``cea608`` - ``cea708`` - ``webvtt`` - mapping (Sequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): + mapping_ (Sequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): The mapping for the ``Job.edit_list`` atoms with text ``EditAtom.inputs``. 
""" @@ -1433,7 +1433,7 @@ class TextMapping(proto.Message): input_track = proto.Field(proto.INT32, number=3,) codec = proto.Field(proto.STRING, number=1,) - mapping = proto.RepeatedField(proto.MESSAGE, number=3, message=TextMapping,) + mapping_ = proto.RepeatedField(proto.MESSAGE, number=3, message=TextMapping,) class SegmentSettings(proto.Message): diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py index 7a9c092..4f2167b 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py @@ -103,7 +103,6 @@ def __init__( credentials, _ = google.auth.load_credentials_from_file( credentials_file, **scopes_kwargs, quota_project_id=quota_project_id ) - elif credentials is None: credentials, _ = google.auth.default( **scopes_kwargs, quota_project_id=quota_project_id diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py index 7c7a7f8..41ac5c7 100644 --- a/google/cloud/video/transcoder_v1beta1/types/resources.py +++ b/google/cloud/video/transcoder_v1beta1/types/resources.py @@ -1093,7 +1093,7 @@ class AudioStream(proto.Message): - 'sr' - Side right channel - 'fc' - Front center channel - 'lfe' - Low frequency - mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom]): + mapping_ (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom]): The mapping for the ``Job.edit_list`` atoms with audio ``EditAtom.inputs``. 
sample_rate_hertz (int): @@ -1161,7 +1161,7 @@ class AudioChannelInput(proto.Message): bitrate_bps = proto.Field(proto.INT32, number=2,) channel_count = proto.Field(proto.INT32, number=3,) channel_layout = proto.RepeatedField(proto.STRING, number=4,) - mapping = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioAtom,) + mapping_ = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioAtom,) sample_rate_hertz = proto.Field(proto.INT32, number=6,) @@ -1184,7 +1184,7 @@ class TextStream(proto.Message): Required. The BCP-47 language code, such as ``"en-US"`` or ``"sr-Latn"``. For more information, see https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. - mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom]): + mapping_ (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom]): The mapping for the ``Job.edit_list`` atoms with text ``EditAtom.inputs``. """ @@ -1223,7 +1223,7 @@ class TextInput(proto.Message): codec = proto.Field(proto.STRING, number=1,) language_code = proto.Field(proto.STRING, number=2,) - mapping = proto.RepeatedField(proto.MESSAGE, number=3, message=TextAtom,) + mapping_ = proto.RepeatedField(proto.MESSAGE, number=3, message=TextAtom,) class SegmentSettings(proto.Message): diff --git a/samples/snippets/create_job_with_embedded_captions.py b/samples/snippets/create_job_with_embedded_captions.py index 31e4216..a81e668 100644 --- a/samples/snippets/create_job_with_embedded_captions.py +++ b/samples/snippets/create_job_with_embedded_captions.py @@ -32,11 +32,7 @@ def create_job_with_embedded_captions( - project_id, - location, - input_video_uri, - input_captions_uri, - output_uri, + project_id, location, input_video_uri, input_captions_uri, output_uri, ): """Creates a job based on an ad-hoc job configuration that embeds captions in the output video. 
@@ -57,19 +53,12 @@ def create_job_with_embedded_captions( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input( - key="input0", - uri=input_video_uri, - ), - transcoder_v1.types.Input( - key="caption-input0", - uri=input_captions_uri, - ), + transcoder_v1.types.Input(key="input0", uri=input_video_uri,), + transcoder_v1.types.Input(key="caption-input0", uri=input_captions_uri,), ], edit_list=[ transcoder_v1.types.EditAtom( - key="atom0", - inputs=["input0", "caption-input0"], + key="atom0", inputs=["input0", "caption-input0"], ), ], elementary_streams=[ @@ -131,9 +120,7 @@ def create_job_with_embedded_captions( elementary_streams=["video-stream0", "audio-stream0"], ), transcoder_v1.types.MuxStream( - key="sd-dash", - container="fmp4", - elementary_streams=["video-stream0"], + key="sd-dash", container="fmp4", elementary_streams=["video-stream0"], ), transcoder_v1.types.MuxStream( key="audio-dash", @@ -143,9 +130,7 @@ def create_job_with_embedded_captions( ], manifests=[ transcoder_v1.types.Manifest( - file_name="manifest.m3u8", - type_="HLS", - mux_streams=["sd-hls"], + file_name="manifest.m3u8", type_="HLS", mux_streams=["sd-hls"], ), transcoder_v1.types.Manifest( file_name="manifest.mpd", @@ -165,9 +150,7 @@ def create_job_with_embedded_captions( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_video_uri", diff --git a/samples/snippets/create_job_with_standalone_captions.py b/samples/snippets/create_job_with_standalone_captions.py index 5ed5467..de32f49 100644 --- a/samples/snippets/create_job_with_standalone_captions.py +++ b/samples/snippets/create_job_with_standalone_captions.py @@ -33,11 +33,7 @@ def 
create_job_with_standalone_captions( - project_id, - location, - input_video_uri, - input_captions_uri, - output_uri, + project_id, location, input_video_uri, input_captions_uri, output_uri, ): """Creates a job based on an ad-hoc job configuration that can use captions from a standalone file. @@ -58,19 +54,12 @@ def create_job_with_standalone_captions( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input( - key="input0", - uri=input_video_uri, - ), - transcoder_v1.types.Input( - key="caption-input0", - uri=input_captions_uri, - ), + transcoder_v1.types.Input(key="input0", uri=input_video_uri,), + transcoder_v1.types.Input(key="caption-input0", uri=input_captions_uri,), ], edit_list=[ transcoder_v1.types.EditAtom( - key="atom0", - inputs=["input0", "caption-input0"], + key="atom0", inputs=["input0", "caption-input0"], ), ], elementary_streams=[ @@ -136,9 +125,7 @@ def create_job_with_standalone_captions( container="vtt", elementary_streams=["vtt-stream0"], segment_settings=transcoder_v1.types.SegmentSettings( - segment_duration=duration.Duration( - seconds=6, - ), + segment_duration=duration.Duration(seconds=6,), individual_segments=True, ), ), @@ -162,9 +149,7 @@ def create_job_with_standalone_captions( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_video_uri", diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py index fa9a93e..d924472 100644 --- a/samples/snippets/job_test.py +++ b/samples/snippets/job_test.py @@ -63,8 +63,12 @@ output_uri_for_animated_overlay = ( f"gs://{output_bucket_name}/test-output-animated-overlay/" ) -output_uri_for_embedded_captions = 
f"gs://{output_bucket_name}/test-output-embedded-captions/" -output_uri_for_standalone_captions = f"gs://{output_bucket_name}/test-output-standalone-captions/" +output_uri_for_embedded_captions = ( + f"gs://{output_bucket_name}/test-output-embedded-captions/" +) +output_uri_for_standalone_captions = ( + f"gs://{output_bucket_name}/test-output-standalone-captions/" +) small_spritesheet_file_prefix = "small-sprite-sheet" large_spritesheet_file_prefix = "large-sprite-sheet" @@ -267,10 +271,7 @@ def test_create_job_with_animated_overlay(capsys, test_bucket): def test_create_job_with_set_number_spritesheet(capsys, test_bucket): create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( - project_id, - location, - input_uri, - output_uri_for_set_number_spritesheet, + project_id, location, input_uri, output_uri_for_set_number_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -318,10 +319,7 @@ def test_create_job_with_set_number_spritesheet(capsys, test_bucket): def test_create_job_with_periodic_spritesheet(capsys, test_bucket): create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( - project_id, - location, - input_uri, - output_uri_for_periodic_spritesheet, + project_id, location, input_uri, output_uri_for_periodic_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -409,11 +407,7 @@ def test_create_job_with_concatenated_inputs(capsys, test_bucket): def test_create_job_with_embedded_captions(capsys, test_bucket): create_job_with_embedded_captions.create_job_with_embedded_captions( - project_id, - location, - input_uri, - captions_uri, - output_uri_for_embedded_captions, + project_id, location, input_uri, captions_uri, output_uri_for_embedded_captions, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" diff --git 
a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 52c6682..2996534 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,3 +1,4 @@ backoff==1.11.1 google-cloud-storage==1.43.0 pytest==6.2.4 +proto-plus>=1.20.1 diff --git a/setup.py b/setup.py index 0a4dfc1..febe200 100644 --- a/setup.py +++ b/setup.py @@ -45,7 +45,11 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - "proto-plus >= 1.4.0", + # Require proto-plus >= 1.20.1 which includes + # a fix to mitigate collisions in field names, specifically + # the `mapping` term which is a reserved term in proto-plus. + # https://github.com/googleapis/proto-plus-python/pull/295 + "proto-plus >= 1.20.1", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 6b451bc..f12a922 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -6,4 +6,4 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==1.28.0 -proto-plus==1.4.0 +proto-plus==1.20.1 diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index ef49f51..5b90a11 100644 --- a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -256,20 +256,20 @@ def test_transcoder_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -338,7 +338,7 @@ def test_transcoder_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -433,7 +433,7 @@ def test_transcoder_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -464,7 +464,7 @@ def test_transcoder_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -497,7 +497,8 @@ def 
test_transcoder_service_client_client_options_from_dict(): ) -def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequest): +@pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) +def test_create_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -533,10 +534,6 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ assert response.ttl_after_completion_days == 2670 -def test_create_job_from_dict(): - test_create_job(request_type=dict) - - def test_create_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -732,7 +729,8 @@ async def test_create_job_flattened_error_async(): ) -def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsRequest): +@pytest.mark.parametrize("request_type", [services.ListJobsRequest, dict,]) +def test_list_jobs(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -760,10 +758,6 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques assert response.unreachable == ["unreachable_value"] -def test_list_jobs_from_dict(): - test_list_jobs(request_type=dict) - - def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -943,8 +937,10 @@ async def test_list_jobs_flattened_error_async(): ) -def test_list_jobs_pager(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -973,8 +969,10 @@ def test_list_jobs_pager(): assert all(isinstance(i, resources.Job) for i in results) -def test_list_jobs_pages(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1053,7 +1051,8 @@ async def test_list_jobs_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): +@pytest.mark.parametrize("request_type", [services.GetJobRequest, dict,]) +def test_get_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1089,10 +1088,6 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): assert response.ttl_after_completion_days == 2670 -def test_get_job_from_dict(): - test_get_job(request_type=dict) - - def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1274,7 +1269,8 @@ async def test_get_job_flattened_error_async(): ) -def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequest): +@pytest.mark.parametrize("request_type", [services.DeleteJobRequest, dict,]) +def test_delete_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1298,10 +1294,6 @@ def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequ assert response is None -def test_delete_job_from_dict(): - test_delete_job(request_type=dict) - - def test_delete_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1470,9 +1462,8 @@ async def test_delete_job_flattened_error_async(): ) -def test_create_job_template( - transport: str = "grpc", request_type=services.CreateJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.CreateJobTemplateRequest, dict,]) +def test_create_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1499,10 +1490,6 @@ def test_create_job_template( assert response.name == "name_value" -def test_create_job_template_from_dict(): - test_create_job_template(request_type=dict) - - def test_create_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1716,9 +1703,8 @@ async def test_create_job_template_flattened_error_async(): ) -def test_list_job_templates( - transport: str = "grpc", request_type=services.ListJobTemplatesRequest -): +@pytest.mark.parametrize("request_type", [services.ListJobTemplatesRequest, dict,]) +def test_list_job_templates(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1748,10 +1734,6 @@ def test_list_job_templates( assert response.unreachable == ["unreachable_value"] -def test_list_job_templates_from_dict(): - test_list_job_templates(request_type=dict) - - def test_list_job_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1943,8 +1925,10 @@ async def test_list_job_templates_flattened_error_async(): ) -def test_list_job_templates_pager(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_job_templates_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1983,8 +1967,10 @@ def test_list_job_templates_pager(): assert all(isinstance(i, resources.JobTemplate) for i in results) -def test_list_job_templates_pages(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_job_templates_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2093,9 +2079,8 @@ async def test_list_job_templates_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_job_template( - transport: str = "grpc", request_type=services.GetJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.GetJobTemplateRequest, dict,]) +def test_get_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2120,10 +2105,6 @@ def test_get_job_template( assert response.name == "name_value" -def test_get_job_template_from_dict(): - test_get_job_template(request_type=dict) - - def test_get_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -2299,9 +2280,8 @@ async def test_get_job_template_flattened_error_async(): ) -def test_delete_job_template( - transport: str = "grpc", request_type=services.DeleteJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.DeleteJobTemplateRequest, dict,]) +def test_delete_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2327,10 +2307,6 @@ def test_delete_job_template( assert response is None -def test_delete_job_template_from_dict(): - test_delete_job_template(request_type=dict) - - def test_delete_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -3056,7 +3032,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py index 364c2ef..da0875d 100644 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -254,20 +254,20 @@ def test_transcoder_service_client_client_options( # unsupported value. with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): with pytest.raises(MutualTLSChannelError): - client = client_class() + client = client_class(transport=transport_name) # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. with mock.patch.dict( os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} ): with pytest.raises(ValueError): - client = client_class() + client = client_class(transport=transport_name) # Check the case quota_project_id is provided options = client_options.ClientOptions(quota_project_id="octopus") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -336,7 +336,7 @@ def test_transcoder_service_client_mtls_env_auto( ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) if use_client_cert_env == "false": expected_client_cert_source = None @@ -431,7 +431,7 @@ def 
test_transcoder_service_client_client_options_scopes( options = client_options.ClientOptions(scopes=["1", "2"],) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file=None, @@ -462,7 +462,7 @@ def test_transcoder_service_client_client_options_credentials_file( options = client_options.ClientOptions(credentials_file="credentials.json") with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None - client = client_class(transport=transport_name, client_options=options) + client = client_class(client_options=options, transport=transport_name) patched.assert_called_once_with( credentials=None, credentials_file="credentials.json", @@ -495,7 +495,8 @@ def test_transcoder_service_client_client_options_from_dict(): ) -def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequest): +@pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) +def test_create_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -535,10 +536,6 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ assert response.ttl_after_completion_days == 2670 -def test_create_job_from_dict(): - test_create_job(request_type=dict) - - def test_create_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -738,7 +735,8 @@ async def test_create_job_flattened_error_async(): ) -def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsRequest): +@pytest.mark.parametrize("request_type", [services.ListJobsRequest, dict,]) +def test_list_jobs(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -765,10 +763,6 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques assert response.next_page_token == "next_page_token_value" -def test_list_jobs_from_dict(): - test_list_jobs(request_type=dict) - - def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -944,8 +938,10 @@ async def test_list_jobs_flattened_error_async(): ) -def test_list_jobs_pager(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -974,8 +970,10 @@ def test_list_jobs_pager(): assert all(isinstance(i, resources.Job) for i in results) -def test_list_jobs_pages(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_jobs_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1054,7 +1052,8 @@ async def test_list_jobs_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): +@pytest.mark.parametrize("request_type", [services.GetJobRequest, dict,]) +def test_get_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1094,10 +1093,6 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): assert response.ttl_after_completion_days == 2670 -def test_get_job_from_dict(): - test_get_job(request_type=dict) - - def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1283,7 +1278,8 @@ async def test_get_job_flattened_error_async(): ) -def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequest): +@pytest.mark.parametrize("request_type", [services.DeleteJobRequest, dict,]) +def test_delete_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1307,10 +1303,6 @@ def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequ assert response is None -def test_delete_job_from_dict(): - test_delete_job(request_type=dict) - - def test_delete_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1479,9 +1471,8 @@ async def test_delete_job_flattened_error_async(): ) -def test_create_job_template( - transport: str = "grpc", request_type=services.CreateJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.CreateJobTemplateRequest, dict,]) +def test_create_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1508,10 +1499,6 @@ def test_create_job_template( assert response.name == "name_value" -def test_create_job_template_from_dict(): - test_create_job_template(request_type=dict) - - def test_create_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -1725,9 +1712,8 @@ async def test_create_job_template_flattened_error_async(): ) -def test_list_job_templates( - transport: str = "grpc", request_type=services.ListJobTemplatesRequest -): +@pytest.mark.parametrize("request_type", [services.ListJobTemplatesRequest, dict,]) +def test_list_job_templates(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -1756,10 +1742,6 @@ def test_list_job_templates( assert response.next_page_token == "next_page_token_value" -def test_list_job_templates_from_dict(): - test_list_job_templates(request_type=dict) - - def test_list_job_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -1947,8 +1929,10 @@ async def test_list_job_templates_flattened_error_async(): ) -def test_list_job_templates_pager(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_job_templates_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1987,8 +1971,10 @@ def test_list_job_templates_pager(): assert all(isinstance(i, resources.JobTemplate) for i in results) -def test_list_job_templates_pages(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials,) +def test_list_job_templates_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2097,9 +2083,8 @@ async def test_list_job_templates_async_pages(): assert page_.raw_page.next_page_token == token -def test_get_job_template( - transport: str = "grpc", request_type=services.GetJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.GetJobTemplateRequest, dict,]) +def test_get_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2124,10 +2109,6 @@ def test_get_job_template( assert response.name == "name_value" -def test_get_job_template_from_dict(): - test_get_job_template(request_type=dict) - - def test_get_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
@@ -2303,9 +2284,8 @@ async def test_get_job_template_flattened_error_async(): ) -def test_delete_job_template( - transport: str = "grpc", request_type=services.DeleteJobTemplateRequest -): +@pytest.mark.parametrize("request_type", [services.DeleteJobTemplateRequest, dict,]) +def test_delete_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), transport=transport, ) @@ -2331,10 +2311,6 @@ def test_delete_job_template( assert response is None -def test_delete_job_template_from_dict(): - test_delete_job_template(request_type=dict) - - def test_delete_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. @@ -3060,7 +3036,7 @@ def test_parse_common_location_path(): assert expected == actual -def test_client_withDEFAULT_CLIENT_INFO(): +def test_client_with_default_client_info(): client_info = gapic_v1.client_info.ClientInfo() with mock.patch.object( From ac6a4031ac66a721d776c41885830023023f14f2 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 15:45:00 +0000 Subject: [PATCH 14/20] feat: add api key support (#127) - [ ] Regenerate this pull request now. 
PiperOrigin-RevId: 423842556 Source-Link: https://github.com/googleapis/googleapis/commit/a616ca08f4b1416abbac7bc5dd6d61c791756a81 Source-Link: https://github.com/googleapis/googleapis-gen/commit/29b938c58c1e51d019f2ee539d55dc0a3c86a905 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiMjliOTM4YzU4YzFlNTFkMDE5ZjJlZTUzOWQ1NWRjMGEzYzg2YTkwNSJ9 Fixes: #140 fix(deps): require proto-plus >= 1.20.3 --- .../transcoder_service/async_client.py | 38 ++++- .../services/transcoder_service/client.py | 127 +++++++++++------ .../transcoder_service/async_client.py | 38 ++++- .../services/transcoder_service/client.py | 127 +++++++++++------ setup.py | 6 +- testing/constraints-3.6.txt | 2 +- .../transcoder_v1/test_transcoder_service.py | 131 ++++++++++++++++++ .../test_transcoder_service.py | 131 ++++++++++++++++++ 8 files changed, 506 insertions(+), 94 deletions(-) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index b7567db..709c4ac 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -120,6 +120,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TranscoderServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> TranscoderServiceTransport: """Returns the transport used by the client instance. 
diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index 1e10da5..acb3543 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -262,6 +262,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. 
+ """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -312,57 +379,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. 
- if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, TranscoderServiceTransport): # transport is a TranscoderServiceTransport instance. 
- if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." @@ -374,6 +406,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py index 6f41c3f..bdffc5a 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py @@ -16,7 +16,7 @@ from collections import OrderedDict import functools import re -from typing import Dict, Sequence, Tuple, Type, Union +from typing import Dict, Optional, Sequence, Tuple, Type, Union import pkg_resources from google.api_core.client_options import ClientOptions @@ -119,6 +119,42 @@ def from_service_account_file(cls, filename: str, *args, **kwargs): from_service_account_json = from_service_account_file + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. 
+ + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TranscoderServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + @property def transport(self) -> TranscoderServiceTransport: """Returns the transport used by the client instance. diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py index 8df1341..e427ee4 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py @@ -261,6 +261,73 @@ def parse_common_location_path(path: str) -> Dict[str, str]: m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) return m.groupdict() if m else {} + @classmethod + def get_mtls_endpoint_and_cert_source( + cls, client_options: Optional[client_options_lib.ClientOptions] = None + ): + """Return the API endpoint and client cert source for mutual TLS. 
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. + (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variabel is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError( + "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" + ) + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError( + "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" + ) + + # Figure out the client cert source to use. 
+ client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or ( + use_mtls_endpoint == "auto" and client_cert_source + ): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + def __init__( self, *, @@ -311,57 +378,22 @@ def __init__( if client_options is None: client_options = client_options_lib.ClientOptions() - # Create SSL credentials for mutual TLS if needed. - if os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") not in ( - "true", - "false", - ): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - use_client_cert = ( - os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") == "true" + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( + client_options ) - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError( + "client_options.api_key and credentials are mutually exclusive" + ) # Save or instantiate the transport. # Ordinarily, we provide the transport, but allowing a custom transport # instance provides an extensibility point for unusual situations. if isinstance(transport, TranscoderServiceTransport): # transport is a TranscoderServiceTransport instance. - if credentials or client_options.credentials_file: + if credentials or client_options.credentials_file or api_key_value: raise ValueError( "When providing a transport instance, " "provide its credentials directly." 
@@ -373,6 +405,15 @@ def __init__( ) self._transport = transport else: + import google.auth._default # type: ignore + + if api_key_value and hasattr( + google.auth._default, "get_api_key_credentials" + ): + credentials = google.auth._default.get_api_key_credentials( + api_key_value + ) + Transport = type(self).get_transport_class(transport) self._transport = Transport( credentials=credentials, diff --git a/setup.py b/setup.py index febe200..d638073 100644 --- a/setup.py +++ b/setup.py @@ -45,11 +45,7 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.28.0, <3.0.0dev", - # Require proto-plus >= 1.20.1 which includes - # a fix to mitigate collisions in field names, specifically - # the `mapping` term which is a reserved term in proto-plus. - # https://github.com/googleapis/proto-plus-python/pull/295 - "proto-plus >= 1.20.1", + "proto-plus >= 1.20.3", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index f12a922..05bdbbf 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -6,4 +6,4 @@ # e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", # Then this file should have foo==1.14.0 google-api-core==1.28.0 -proto-plus==1.20.1 +proto-plus==1.20.3 diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index 5b90a11..cd5a9b7 100644 --- a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -415,6 +415,87 @@ def test_transcoder_service_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient] +) +@mock.patch.object( + TranscoderServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TranscoderServiceClient), +) +@mock.patch.object( + TranscoderServiceAsyncClient, + "DEFAULT_ENDPOINT", + 
modify_default_endpoint(TranscoderServiceAsyncClient), +) +def test_transcoder_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2507,6 +2588,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.TranscoderServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3097,3 +3195,36 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport), + ( + TranscoderServiceAsyncClient, + transports.TranscoderServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py index da0875d..724ead4 100644 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -413,6 +413,87 @@ def test_transcoder_service_client_mtls_env_auto( ) +@pytest.mark.parametrize( + "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient] +) +@mock.patch.object( + TranscoderServiceClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TranscoderServiceClient), +) +@mock.patch.object( + TranscoderServiceAsyncClient, + "DEFAULT_ENDPOINT", + modify_default_endpoint(TranscoderServiceAsyncClient), +) +def 
test_transcoder_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions( + client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint + ) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( + options + ) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=False, + ): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch( + "google.auth.transport.mtls.has_default_client_cert_source", + return_value=True, + ): + with mock.patch( + "google.auth.transport.mtls.default_client_cert_source", + return_value=mock_client_cert_source, + ): + ( + api_endpoint, + cert_source, + ) = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + @pytest.mark.parametrize( "client_class,transport_class,transport_name", [ @@ -2511,6 +2592,23 @@ def test_credentials_transport_error(): transport=transport, ) + # It is an error to provide an api_key and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient(client_options=options, transport=transport,) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options=options, credentials=ga_credentials.AnonymousCredentials() + ) + # It is an error to provide scopes and a transport instance. 
transport = transports.TranscoderServiceGrpcTransport( credentials=ga_credentials.AnonymousCredentials(), @@ -3101,3 +3199,36 @@ def test_client_ctx(): with client: pass close.assert_called() + + +@pytest.mark.parametrize( + "client_class,transport_class", + [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport), + ( + TranscoderServiceAsyncClient, + transports.TranscoderServiceGrpcAsyncIOTransport, + ), + ], +) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) From 5729742e40ee6e447ab043a46b4d6b3398ec63ff Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 24 Feb 2022 17:00:41 +0100 Subject: [PATCH 15/20] chore(deps): update all dependencies (#132) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * chore(deps): update all dependencies * 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Bu Sun Kim <8822365+busunkim96@users.noreply.github.com> Co-authored-by: Owl Bot Co-authored-by: Anthonios Partheniou --- samples/snippets/requirements-test.txt | 5 ++--- samples/snippets/requirements.txt | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/samples/snippets/requirements-test.txt 
b/samples/snippets/requirements-test.txt index 2996534..4255a60 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,4 +1,3 @@ backoff==1.11.1 -google-cloud-storage==1.43.0 -pytest==6.2.4 -proto-plus>=1.20.1 +google-cloud-storage==2.1.0 +pytest==7.0.0 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 7011d0b..3286e50 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-api-python-client==2.34.0 +google-api-python-client==2.36.0 grpcio==1.43.0 google-cloud-video-transcoder==1.2.1 From 8cdb73a5c189c23e51d86356e542b9b8d044c47b Mon Sep 17 00:00:00 2001 From: WhiteSource Renovate Date: Thu, 24 Feb 2022 17:12:16 +0100 Subject: [PATCH 16/20] chore(deps): update all dependencies (#141) --- samples/snippets/requirements-test.txt | 2 +- samples/snippets/requirements.txt | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index 4255a60..bc29a49 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,3 +1,3 @@ backoff==1.11.1 google-cloud-storage==2.1.0 -pytest==7.0.0 +pytest==7.0.1 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 3286e50..f219cba 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-api-python-client==2.36.0 -grpcio==1.43.0 +google-api-python-client==2.38.0 +grpcio==1.44.0 google-cloud-video-transcoder==1.2.1 From 6774bd328f235894caf7343088c25cc2809d8932 Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 16:24:52 +0000 Subject: [PATCH 17/20] chore: use gapic-generator-python 0.62.1 (#134) - [ ] Regenerate this pull request now. 
See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md fix: resolve DuplicateCredentialArgs error when using credentials_file committer: parthea PiperOrigin-RevId: 425964861 Source-Link: https://github.com/googleapis/googleapis/commit/84b1a5a4f6fb2d04905be58e586b8a7a4310a8cf Source-Link: https://github.com/googleapis/googleapis-gen/commit/4fb761bbd8506ac156f49bac5f18306aa8eb3aa8 Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiNGZiNzYxYmJkODUwNmFjMTU2ZjQ5YmFjNWYxODMwNmFhOGViM2FhOCJ9 --- .../transcoder_service/async_client.py | 16 ++-- .../services/transcoder_service/client.py | 16 ++-- .../transcoder_service/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../transcoder_service/async_client.py | 16 ++-- .../services/transcoder_service/client.py | 16 ++-- .../transcoder_service/transports/grpc.py | 5 +- .../transports/grpc_asyncio.py | 5 +- .../transcoder_v1/test_transcoder_service.py | 79 ++++++++++++++++++- .../test_transcoder_service.py | 79 ++++++++++++++++++- 10 files changed, 200 insertions(+), 42 deletions(-) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index 709c4ac..a2141e0 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -257,7 +257,7 @@ async def create_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job]) if request is not None and has_flattened_params: @@ -333,7 +333,7 @@ async def list_jobs( """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -408,7 +408,7 @@ async def get_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -473,7 +473,7 @@ async def delete_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -563,7 +563,7 @@ async def create_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job_template, job_template_id]) if request is not None and has_flattened_params: @@ -641,7 +641,7 @@ async def list_job_templates( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -717,7 +717,7 @@ async def get_job_template( Transcoding job template resource. 
""" # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -782,7 +782,7 @@ async def delete_job_template( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index acb3543..407848e 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -468,7 +468,7 @@ def create_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job]) if request is not None and has_flattened_params: @@ -544,7 +544,7 @@ def list_jobs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -619,7 +619,7 @@ def get_job( Transcoding job resource. """ # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -684,7 +684,7 @@ def delete_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -774,7 +774,7 @@ def create_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job_template, job_template_id]) if request is not None and has_flattened_params: @@ -852,7 +852,7 @@ def list_job_templates( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -928,7 +928,7 @@ def get_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -993,7 +993,7 @@ def delete_job_template( sent along with the request as metadata. 
""" # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py index 56d7343..d13582f 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py @@ -166,8 +166,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py index c0c87d1..d194152 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py @@ -211,8 +211,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py index bdffc5a..5919376 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py @@ -256,7 +256,7 @@ async def create_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job]) if request is not None and has_flattened_params: @@ -332,7 +332,7 @@ async def list_jobs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -407,7 +407,7 @@ async def get_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -472,7 +472,7 @@ async def delete_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -562,7 +562,7 @@ async def create_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job_template, job_template_id]) if request is not None and has_flattened_params: @@ -640,7 +640,7 @@ async def list_job_templates( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -716,7 +716,7 @@ async def get_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -781,7 +781,7 @@ async def delete_job_template( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py index e427ee4..1c54c6a 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py @@ -467,7 +467,7 @@ def create_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job]) if request is not None and has_flattened_params: @@ -543,7 +543,7 @@ def list_jobs( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -618,7 +618,7 @@ def get_job( Transcoding job resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -683,7 +683,7 @@ def delete_job( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -773,7 +773,7 @@ def create_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, job_template, job_template_id]) if request is not None and has_flattened_params: @@ -851,7 +851,7 @@ def list_job_templates( """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: @@ -927,7 +927,7 @@ def get_job_template( Transcoding job template resource. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: @@ -992,7 +992,7 @@ def delete_job_template( sent along with the request as metadata. """ # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have + # Quick check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([name]) if request is not None and has_flattened_params: diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py index 72b5df3..8c9ade7 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py @@ -166,8 +166,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py index 0e88ee2..b4ff788 100644 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py +++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py @@ -211,8 +211,11 @@ def __init__( if not self._grpc_channel: self._grpc_channel = type(self).create_channel( self._host, + # use the credentials which are saved credentials=self._credentials, - credentials_file=credentials_file, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. 
+ credentials_file=None, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index cd5a9b7..8248f64 100644 --- a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -528,21 +528,28 @@ def test_transcoder_service_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + ( + TranscoderServiceClient, + transports.TranscoderServiceGrpcTransport, + "grpc", + grpc_helpers, + ), ( TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_transcoder_service_client_client_options_credentials_file( - client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. 
options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -578,6 +585,72 @@ def test_transcoder_service_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TranscoderServiceClient, + transports.TranscoderServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TranscoderServiceAsyncClient, + transports.TranscoderServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_transcoder_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) def test_create_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py index 724ead4..cd2c1b7 100644 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -526,21 +526,28 @@ def test_transcoder_service_client_client_options_scopes( @pytest.mark.parametrize( - "client_class,transport_class,transport_name", + "client_class,transport_class,transport_name,grpc_helpers", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + ( + TranscoderServiceClient, + transports.TranscoderServiceGrpcTransport, + "grpc", + grpc_helpers, + ), ( TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", + grpc_helpers_async, ), ], ) def test_transcoder_service_client_client_options_credentials_file( - 
client_class, transport_class, transport_name + client_class, transport_class, transport_name, grpc_helpers ): # Check the case credentials file is provided. options = client_options.ClientOptions(credentials_file="credentials.json") + with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -576,6 +583,72 @@ def test_transcoder_service_client_client_options_from_dict(): ) +@pytest.mark.parametrize( + "client_class,transport_class,transport_name,grpc_helpers", + [ + ( + TranscoderServiceClient, + transports.TranscoderServiceGrpcTransport, + "grpc", + grpc_helpers, + ), + ( + TranscoderServiceAsyncClient, + transports.TranscoderServiceGrpcAsyncIOTransport, + "grpc_asyncio", + grpc_helpers_async, + ), + ], +) +def test_transcoder_service_client_create_channel_credentials_file( + client_class, transport_class, transport_name, grpc_helpers +): + # Check the case credentials file is provided. + options = client_options.ClientOptions(credentials_file="credentials.json") + + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # test that the credentials from file are saved and used as the credentials. 
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=("https://www.googleapis.com/auth/cloud-platform",), + scopes=None, + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + @pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) def test_create_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( From e8a85da130a0b688167a1474c339c66af1c6760c Mon Sep 17 00:00:00 2001 From: "gcf-owl-bot[bot]" <78513119+gcf-owl-bot[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:38:56 +0000 Subject: [PATCH 18/20] fix: Remove deprecated v1beta1 API that is no longer available (#138) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - [ ] Regenerate this pull request now. 
Merge branch 'owl-bot-copy' of https://github.com/googleapis/python-video-transcoder into owl-bot-copy 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md 🦉 Updates from OwlBot See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md chore: use gapic-generator-python 0.63.2 docs: add generated snippets PiperOrigin-RevId: 427792504 Source-Link: https://github.com/googleapis/googleapis/commit/55b9e1e0b3106c850d13958352bc0751147b6b15 Source-Link: https://github.com/googleapis/googleapis-gen/commit/bf4e86b753f42cb0edb1fd51fbe840d7da0a1cde Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiYmY0ZTg2Yjc1M2Y0MmNiMGVkYjFmZDUxZmJlODQwZDdkYTBhMWNkZSJ9 --- docs/index.rst | 11 - docs/transcoder_v1beta1/services.rst | 6 - .../transcoder_v1beta1/transcoder_service.rst | 10 - docs/transcoder_v1beta1/types.rst | 7 - .../transcoder_service/async_client.py | 153 + .../services/transcoder_service/client.py | 161 + .../video/transcoder_v1beta1/__init__.py | 86 - .../transcoder_v1beta1/gapic_metadata.json | 103 - .../cloud/video/transcoder_v1beta1/py.typed | 2 - .../transcoder_v1beta1/services/__init__.py | 15 - .../services/transcoder_service/__init__.py | 22 - .../transcoder_service/async_client.py | 836 ----- .../services/transcoder_service/client.py | 1054 ------ .../services/transcoder_service/pagers.py | 284 -- .../transcoder_service/transports/__init__.py | 33 - .../transcoder_service/transports/base.py | 233 -- .../transcoder_service/transports/grpc.py | 447 --- .../transports/grpc_asyncio.py | 455 --- .../transcoder_v1beta1/types/__init__.py | 84 - .../transcoder_v1beta1/types/resources.py | 1366 ------- .../transcoder_v1beta1/types/services.py | 206 - .../snippet_metadata_transcoder_v1.json | 708 ++++ 
..._v1_transcoder_service_create_job_async.py | 49 + ...r_v1_transcoder_service_create_job_sync.py | 49 + ...coder_service_create_job_template_async.py | 46 + ...scoder_service_create_job_template_sync.py | 46 + ..._v1_transcoder_service_delete_job_async.py | 43 + ...r_v1_transcoder_service_delete_job_sync.py | 43 + ...coder_service_delete_job_template_async.py | 43 + ...scoder_service_delete_job_template_sync.py | 43 + ...der_v1_transcoder_service_get_job_async.py | 45 + ...oder_v1_transcoder_service_get_job_sync.py | 45 + ...anscoder_service_get_job_template_async.py | 45 + ...ranscoder_service_get_job_template_sync.py | 45 + ...scoder_service_list_job_templates_async.py | 46 + ...nscoder_service_list_job_templates_sync.py | 46 + ...r_v1_transcoder_service_list_jobs_async.py | 46 + ...er_v1_transcoder_service_list_jobs_sync.py | 46 + ...te_job_with_periodic_images_spritesheet.py | 2 +- ..._job_with_set_number_images_spritesheet.py | 2 +- scripts/fixup_transcoder_v1beta1_keywords.py | 183 - .../unit/gapic/transcoder_v1beta1/__init__.py | 15 - .../test_transcoder_service.py | 3307 ----------------- 43 files changed, 1750 insertions(+), 8767 deletions(-) delete mode 100644 docs/transcoder_v1beta1/services.rst delete mode 100644 docs/transcoder_v1beta1/transcoder_service.rst delete mode 100644 docs/transcoder_v1beta1/types.rst delete mode 100644 google/cloud/video/transcoder_v1beta1/__init__.py delete mode 100644 google/cloud/video/transcoder_v1beta1/gapic_metadata.json delete mode 100644 google/cloud/video/transcoder_v1beta1/py.typed delete mode 100644 google/cloud/video/transcoder_v1beta1/services/__init__.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py delete mode 100644 
google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py delete mode 100644 google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py delete mode 100644 google/cloud/video/transcoder_v1beta1/types/__init__.py delete mode 100644 google/cloud/video/transcoder_v1beta1/types/resources.py delete mode 100644 google/cloud/video/transcoder_v1beta1/types/services.py create mode 100644 samples/generated_samples/snippet_metadata_transcoder_v1.json create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_async.py create mode 100644 
samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_sync.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_async.py create mode 100644 samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_sync.py delete mode 100644 scripts/fixup_transcoder_v1beta1_keywords.py delete mode 100644 tests/unit/gapic/transcoder_v1beta1/__init__.py delete mode 100644 tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py diff --git a/docs/index.rst b/docs/index.rst index b69c1f1..5698dc4 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -2,9 +2,6 @@ .. include:: multiprocessing.rst -This package includes clients for multiple versions of Transcoder. -By default, you will get version ``transcoder_v1``. - API Reference ------------- @@ -14,14 +11,6 @@ API Reference transcoder_v1/services transcoder_v1/types -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - transcoder_v1beta1/services - transcoder_v1beta1/types - Changelog --------- diff --git a/docs/transcoder_v1beta1/services.rst b/docs/transcoder_v1beta1/services.rst deleted file mode 100644 index a3b6569..0000000 --- a/docs/transcoder_v1beta1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Video Transcoder v1beta1 API -====================================================== -.. 
toctree:: - :maxdepth: 2 - - transcoder_service diff --git a/docs/transcoder_v1beta1/transcoder_service.rst b/docs/transcoder_v1beta1/transcoder_service.rst deleted file mode 100644 index c631a53..0000000 --- a/docs/transcoder_v1beta1/transcoder_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TranscoderService ------------------------------------ - -.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service - :members: - :inherited-members: - -.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers - :members: - :inherited-members: diff --git a/docs/transcoder_v1beta1/types.rst b/docs/transcoder_v1beta1/types.rst deleted file mode 100644 index cb38b8a..0000000 --- a/docs/transcoder_v1beta1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Video Transcoder v1beta1 API -=================================================== - -.. automodule:: google.cloud.video.transcoder_v1beta1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index a2141e0..1522d5f 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -228,6 +228,29 @@ async def create_job( ) -> resources.Job: r"""Creates a job in the specified region. + .. 
code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]): The request object. Request message for @@ -306,6 +329,26 @@ async def list_jobs( ) -> pagers.ListJobsAsyncPager: r"""Lists jobs in the specified region. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]): The request object. Request message for @@ -386,6 +429,25 @@ async def get_job( ) -> resources.Job: r"""Returns the job data. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]): The request object. Request message for @@ -455,6 +517,22 @@ async def delete_job( ) -> None: r"""Deletes a job. + .. 
code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]): The request object. Request message for @@ -521,6 +599,26 @@ async def create_job_template( ) -> resources.JobTemplate: r"""Creates a job template in the specified region. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = client.create_job_template(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]): The request object. Request message for @@ -614,6 +712,26 @@ async def list_job_templates( ) -> pagers.ListJobTemplatesAsyncPager: r"""Lists job templates in the specified region. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]): The request object. 
Request message for @@ -694,6 +812,25 @@ async def get_job_template( ) -> resources.JobTemplate: r"""Returns the job template data. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_template(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]): The request object. Request message for @@ -764,6 +901,22 @@ async def delete_job_template( ) -> None: r"""Deletes a job template. + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_job_template(request=request) + Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]): The request object. Request message for diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index 407848e..4380c6a 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -439,6 +439,30 @@ def create_job( ) -> resources.Job: r"""Creates a job in the specified region. + + .. 
code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]): The request object. Request message for @@ -517,6 +541,27 @@ def list_jobs( ) -> pagers.ListJobsPager: r"""Lists jobs in the specified region. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]): The request object. Request message for @@ -597,6 +642,26 @@ def get_job( ) -> resources.Job: r"""Returns the job data. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]): The request object. Request message for @@ -666,6 +731,23 @@ def delete_job( ) -> None: r"""Deletes a job. + + .. 
code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]): The request object. Request message for @@ -732,6 +814,27 @@ def create_job_template( ) -> resources.JobTemplate: r"""Creates a job template in the specified region. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = client.create_job_template(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]): The request object. Request message for @@ -825,6 +928,27 @@ def list_job_templates( ) -> pagers.ListJobTemplatesPager: r"""Lists job templates in the specified region. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]): The request object. 
Request message for @@ -905,6 +1029,26 @@ def get_job_template( ) -> resources.JobTemplate: r"""Returns the job template data. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_template(request=request) + + # Handle the response + print(response) + Args: request (Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]): The request object. Request message for @@ -975,6 +1119,23 @@ def delete_job_template( ) -> None: r"""Deletes a job template. + + .. code-block:: + + from google.cloud.video import transcoder_v1 + + def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_job_template(request=request) + Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]): The request object. Request message for diff --git a/google/cloud/video/transcoder_v1beta1/__init__.py b/google/cloud/video/transcoder_v1beta1/__init__.py deleted file mode 100644 index 6ccbe1f..0000000 --- a/google/cloud/video/transcoder_v1beta1/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.transcoder_service import TranscoderServiceClient -from .services.transcoder_service import TranscoderServiceAsyncClient - -from .types.resources import AdBreak -from .types.resources import AudioStream -from .types.resources import EditAtom -from .types.resources import ElementaryStream -from .types.resources import Encryption -from .types.resources import FailureDetail -from .types.resources import Input -from .types.resources import Job -from .types.resources import JobConfig -from .types.resources import JobTemplate -from .types.resources import Manifest -from .types.resources import MuxStream -from .types.resources import Output -from .types.resources import Overlay -from .types.resources import PreprocessingConfig -from .types.resources import Progress -from .types.resources import PubsubDestination -from .types.resources import SegmentSettings -from .types.resources import SpriteSheet -from .types.resources import TextStream -from .types.resources import VideoStream -from .types.services import CreateJobRequest -from .types.services import CreateJobTemplateRequest -from .types.services import DeleteJobRequest -from .types.services import DeleteJobTemplateRequest -from .types.services import GetJobRequest -from .types.services import GetJobTemplateRequest -from .types.services import ListJobsRequest -from .types.services import ListJobsResponse -from .types.services import ListJobTemplatesRequest -from .types.services import ListJobTemplatesResponse - -__all__ = ( - "TranscoderServiceAsyncClient", - "AdBreak", - "AudioStream", - 
"CreateJobRequest", - "CreateJobTemplateRequest", - "DeleteJobRequest", - "DeleteJobTemplateRequest", - "EditAtom", - "ElementaryStream", - "Encryption", - "FailureDetail", - "GetJobRequest", - "GetJobTemplateRequest", - "Input", - "Job", - "JobConfig", - "JobTemplate", - "ListJobTemplatesRequest", - "ListJobTemplatesResponse", - "ListJobsRequest", - "ListJobsResponse", - "Manifest", - "MuxStream", - "Output", - "Overlay", - "PreprocessingConfig", - "Progress", - "PubsubDestination", - "SegmentSettings", - "SpriteSheet", - "TextStream", - "TranscoderServiceClient", - "VideoStream", -) diff --git a/google/cloud/video/transcoder_v1beta1/gapic_metadata.json b/google/cloud/video/transcoder_v1beta1/gapic_metadata.json deleted file mode 100644 index ebf08b7..0000000 --- a/google/cloud/video/transcoder_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.video.transcoder_v1beta1", - "protoPackage": "google.cloud.video.transcoder.v1beta1", - "schema": "1.0", - "services": { - "TranscoderService": { - "clients": { - "grpc": { - "libraryClient": "TranscoderServiceClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "grpc-async": { - "libraryClient": "TranscoderServiceAsyncClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - 
"methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - } - } - } - } -} diff --git a/google/cloud/video/transcoder_v1beta1/py.typed b/google/cloud/video/transcoder_v1beta1/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/google/cloud/video/transcoder_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/google/cloud/video/transcoder_v1beta1/services/__init__.py b/google/cloud/video/transcoder_v1beta1/services/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py deleted file mode 100644 index d853c3a..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import TranscoderServiceClient -from .async_client import TranscoderServiceAsyncClient - -__all__ = ( - "TranscoderServiceClient", - "TranscoderServiceAsyncClient", -) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py deleted file mode 100644 index 5919376..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py +++ /dev/null @@ -1,836 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport -from .client import TranscoderServiceClient - - -class TranscoderServiceAsyncClient: - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. 
- """ - - _client: TranscoderServiceClient - - DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT - - job_path = staticmethod(TranscoderServiceClient.job_path) - parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) - job_template_path = staticmethod(TranscoderServiceClient.job_template_path) - parse_job_template_path = staticmethod( - TranscoderServiceClient.parse_job_template_path - ) - common_billing_account_path = staticmethod( - TranscoderServiceClient.common_billing_account_path - ) - parse_common_billing_account_path = staticmethod( - TranscoderServiceClient.parse_common_billing_account_path - ) - common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) - parse_common_folder_path = staticmethod( - TranscoderServiceClient.parse_common_folder_path - ) - common_organization_path = staticmethod( - TranscoderServiceClient.common_organization_path - ) - parse_common_organization_path = staticmethod( - TranscoderServiceClient.parse_common_organization_path - ) - common_project_path = staticmethod(TranscoderServiceClient.common_project_path) - parse_common_project_path = staticmethod( - TranscoderServiceClient.parse_common_project_path - ) - common_location_path = staticmethod(TranscoderServiceClient.common_location_path) - parse_common_location_path = staticmethod( - TranscoderServiceClient.parse_common_location_path - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. 
- """ - return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. - """ - return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. 
Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return TranscoderServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial( - type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient) - ) - - def __init__( - self, - *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. 
GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = TranscoderServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - ) - - async def create_job( - self, - request: Union[services.CreateJobRequest, dict] = None, - *, - parent: str = None, - job: resources.Job = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.CreateJobRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (:class:`str`): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (:class:`google.cloud.video.transcoder_v1beta1.types.Job`): - Required. Parameters for creating - transcoding job. 
- - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def list_jobs( - self, - request: Union[services.ListJobsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""Lists jobs in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.ListJobsRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (:class:`str`): - Required. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsAsyncPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job( - self, - request: Union[services.GetJobRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.GetJobRequest, dict]): - The request object. Request message for - `TranscoderService.GetJob`. - name (:class:`str`): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def delete_job( - self, - request: Union[services.DeleteJobRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (:class:`str`): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - async def create_job_template( - self, - request: Union[services.CreateJobTemplateRequest, dict] = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (:class:`str`): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (:class:`google.cloud.video.transcoder_v1beta1.types.JobTemplate`): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (:class:`str`): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.CreateJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - async def list_job_templates( - self, - request: Union[services.ListJobTemplatesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesAsyncPager: - r"""Lists job templates in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (:class:`str`): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: - Response message for TranscoderService.ListJobTemplates. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.ListJobTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_templates, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTemplatesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_template( - self, - request: Union[services.GetJobTemplateRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (:class:`str`): - Required. The name of the job template to retrieve. 
- Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.GetJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. 
- return response - - async def delete_job_template( - self, - request: Union[services.DeleteJobTemplateRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (:class:`str`): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - request = services.DeleteJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ("TranscoderServiceAsyncClient",) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py deleted file mode 100644 index 1c54c6a..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py +++ /dev/null @@ -1,1054 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import os -import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import TranscoderServiceGrpcTransport -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -class TranscoderServiceClientMeta(type): - """Metaclass for the TranscoderService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - - _transport_registry = ( - OrderedDict() - ) # type: Dict[str, Type[TranscoderServiceTransport]] - _transport_registry["grpc"] = TranscoderServiceGrpcTransport - _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport - - def get_transport_class( - cls, label: str = None, - ) -> Type[TranscoderServiceTransport]: - """Returns an appropriate transport class. 
- - Args: - label: The name of the desired transport. If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta): - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "transcoder.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file(filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def job_path(project: str, location: str, job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format( - project=project, location=location, job=job, - ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str, str]: - """Parses a job path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def job_template_path(project: str, location: str, job_template: str,) -> str: - """Returns a fully-qualified job_template string.""" - return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format( - project=project, location=location, job_template=job_template, - ) - - @staticmethod - def parse_job_template_path(path: str) -> Dict[str, str]: - """Parses a job_template path into its component segments.""" - m = re.match( - r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$", - path, - ) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str,) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str, str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str,) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str, str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def 
common_organization_path(organization: str,) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str, str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str,) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str, str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str,) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format( - project=project, location=location, - ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str, str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source( - cls, client_options: Optional[client_options_lib.ClientOptions] = None - ): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError( - "Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`" - ) - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError( - "Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`" - ) - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. 
- if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or ( - use_mtls_endpoint == "auto" and client_cert_source - ): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__( - self, - *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, TranscoderServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. 
If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source( - client_options - ) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError( - "client_options.api_key and credentials are mutually exclusive" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, TranscoderServiceTransport): - # transport is a TranscoderServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError( - "When providing a transport instance, " - "provide its credentials directly." - ) - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr( - google.auth._default, "get_api_key_credentials" - ): - credentials = google.auth._default.get_api_key_credentials( - api_key_value - ) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - ) - - def create_job( - self, - request: Union[services.CreateJobRequest, dict] = None, - *, - parent: str = None, - job: resources.Job = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.CreateJobRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.video.transcoder_v1beta1.types.Job): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. 
- """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobRequest): - request = services.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_jobs( - self, - request: Union[services.ListJobsRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""Lists jobs in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.ListJobsRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (str): - Required. 
Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobsRequest): - request = services.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job( - self, - request: Union[services.GetJobRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.GetJobRequest, dict]): - The request object. Request message for - `TranscoderService.GetJob`. - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, services.GetJobRequest): - request = services.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_job( - self, - request: Union[services.DeleteJobRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobRequest): - request = services.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def create_job_template( - self, - request: Union[services.CreateJobTemplateRequest, dict] = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (str): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobTemplateRequest): - request = services.CreateJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def list_job_templates( - self, - request: Union[services.ListJobTemplatesRequest, dict] = None, - *, - parent: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesPager: - r"""Lists job templates in the specified region. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesPager: - Response message for TranscoderService.ListJobTemplates. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobTemplatesRequest): - request = services.ListJobTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTemplatesPager( - method=rpc, request=request, response=response, metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_template( - self, - request: Union[services.GetJobTemplateRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (str): - Required. The name of the job template to retrieve. - Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." - ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.GetJobTemplateRequest): - request = services.GetJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) - - # Done; return the response. - return response - - def delete_job_template( - self, - request: Union[services.DeleteJobTemplateRequest, dict] = None, - *, - name: str = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (Union[google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError( - "If the `request` argument is set, then none of " - "the individual field arguments should be set." 
- ) - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobTemplateRequest): - request = services.DeleteJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), - ) - - # Send the request. - rpc( - request, retry=retry, timeout=timeout, metadata=metadata, - ) - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! 
- """ - self.transport.close() - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ("TranscoderServiceClient",) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py deleted file mode 100644 index c845501..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py +++ /dev/null @@ -1,284 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import ( - Any, - AsyncIterator, - Awaitable, - Callable, - Sequence, - Tuple, - Optional, - Iterator, -) - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., services.ListJobsResponse], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., Awaitable[services.ListJobsResponse]], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[resources.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListJobTemplatesPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - - def __init__( - self, - method: Callable[..., services.ListJobTemplatesResponse], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.JobTemplate]: - for page in self.pages: - yield from page.job_templates - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) - - -class ListJobTemplatesAsyncPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - - def __init__( - self, - method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = () - ): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterator[resources.JobTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.job_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return "{0}<{1!r}>".format(self.__class__.__name__, self._response) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py deleted file mode 100644 index f7496c0..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# 
-# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import TranscoderServiceTransport -from .grpc import TranscoderServiceGrpcTransport -from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] -_transport_registry["grpc"] = TranscoderServiceGrpcTransport -_transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport - -__all__ = ( - "TranscoderServiceTransport", - "TranscoderServiceGrpcTransport", - "TranscoderServiceGrpcAsyncIOTransport", -) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py deleted file mode 100644 index 4f2167b..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py +++ /dev/null @@ -1,233 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -class TranscoderServiceTransport(abc.ABC): - """Abstract transport class for TranscoderService.""" - - AUTH_SCOPES = ("https://www.googleapis.com/auth/cloud-platform",) - - DEFAULT_HOST: str = "transcoder.googleapis.com" - - def __init__( - self, - *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. 
- - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ":" not in host: - host += ":443" - self._host = host - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs( - "'credentials_file' and 'credentials' are mutually exclusive" - ) - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, **scopes_kwargs, quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default( - **scopes_kwargs, quota_project_id=quota_project_id - ) - - # If the credentials are service account credentials, then always try to use self signed JWT. 
- if ( - always_use_jwt_access - and isinstance(credentials, service_account.Credentials) - and hasattr(service_account.Credentials, "with_always_use_jwt_access") - ): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, default_timeout=60.0, client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, default_timeout=60.0, client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, default_timeout=60.0, client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, default_timeout=60.0, client_info=client_info, - ), - self.create_job_template: gapic_v1.method.wrap_method( - self.create_job_template, default_timeout=60.0, client_info=client_info, - ), - self.list_job_templates: gapic_v1.method.wrap_method( - self.list_job_templates, default_timeout=60.0, client_info=client_info, - ), - self.get_job_template: gapic_v1.method.wrap_method( - self.get_job_template, default_timeout=60.0, client_info=client_info, - ), - self.delete_job_template: gapic_v1.method.wrap_method( - self.delete_job_template, default_timeout=60.0, client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
- """ - raise NotImplementedError() - - @property - def create_job( - self, - ) -> Callable[ - [services.CreateJobRequest], Union[resources.Job, Awaitable[resources.Job]] - ]: - raise NotImplementedError() - - @property - def list_jobs( - self, - ) -> Callable[ - [services.ListJobsRequest], - Union[services.ListJobsResponse, Awaitable[services.ListJobsResponse]], - ]: - raise NotImplementedError() - - @property - def get_job( - self, - ) -> Callable[ - [services.GetJobRequest], Union[resources.Job, Awaitable[resources.Job]] - ]: - raise NotImplementedError() - - @property - def delete_job( - self, - ) -> Callable[ - [services.DeleteJobRequest], Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]] - ]: - raise NotImplementedError() - - @property - def create_job_template( - self, - ) -> Callable[ - [services.CreateJobTemplateRequest], - Union[resources.JobTemplate, Awaitable[resources.JobTemplate]], - ]: - raise NotImplementedError() - - @property - def list_job_templates( - self, - ) -> Callable[ - [services.ListJobTemplatesRequest], - Union[ - services.ListJobTemplatesResponse, - Awaitable[services.ListJobTemplatesResponse], - ], - ]: - raise NotImplementedError() - - @property - def get_job_template( - self, - ) -> Callable[ - [services.GetJobTemplateRequest], - Union[resources.JobTemplate, Awaitable[resources.JobTemplate]], - ]: - raise NotImplementedError() - - @property - def delete_job_template( - self, - ) -> Callable[ - [services.DeleteJobTemplateRequest], - Union[empty_pb2.Empty, Awaitable[empty_pb2.Empty]], - ]: - raise NotImplementedError() - - -__all__ = ("TranscoderServiceTransport",) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py deleted file mode 100644 index 8c9ade7..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py +++ /dev/null @@ -1,447 +0,0 @@ -# -*- coding: utf-8 
-*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO - - -class TranscoderServiceGrpcTransport(TranscoderServiceTransport): - """gRPC backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _stubs: Dict[str, Callable] - - def __init__( - self, - *, - host: str = "transcoder.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. 
A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel( - cls, - host: str = "transcoder.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. 
If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_job(self) -> Callable[[services.CreateJobRequest], resources.Job]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob", - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs["create_job"] - - @property - def list_jobs( - self, - ) -> Callable[[services.ListJobsRequest], services.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs", - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs["list_jobs"] - - @property - def get_job(self) -> Callable[[services.GetJobRequest], resources.Job]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob", - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs["get_job"] - - @property - def delete_job(self) -> Callable[[services.DeleteJobRequest], empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob", - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_job"] - - @property - def create_job_template( - self, - ) -> Callable[[services.CreateJobTemplateRequest], resources.JobTemplate]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "create_job_template" not in self._stubs: - self._stubs["create_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate", - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs["create_job_template"] - - @property - def list_job_templates( - self, - ) -> Callable[ - [services.ListJobTemplatesRequest], services.ListJobTemplatesResponse - ]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - ~.ListJobTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_job_templates" not in self._stubs: - self._stubs["list_job_templates"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates", - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs["list_job_templates"] - - @property - def get_job_template( - self, - ) -> Callable[[services.GetJobTemplateRequest], resources.JobTemplate]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "get_job_template" not in self._stubs: - self._stubs["get_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate", - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs["get_job_template"] - - @property - def delete_job_template( - self, - ) -> Callable[[services.DeleteJobTemplateRequest], empty_pb2.Empty]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_job_template" not in self._stubs: - self._stubs["delete_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate", - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_job_template"] - - def close(self): - self.grpc_channel.close() - - -__all__ = ("TranscoderServiceGrpcTransport",) diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py deleted file mode 100644 index b4ff788..0000000 --- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,455 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import TranscoderServiceGrpcTransport - - -class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): - """gRPC AsyncIO backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel( - cls, - host: str = "transcoder.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs, - ) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs, - ) - - def __init__( - self, - *, - host: str = "transcoder.googleapis.com", - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. 
- return self._grpc_channel - - @property - def create_job( - self, - ) -> Callable[[services.CreateJobRequest], Awaitable[resources.Job]]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_job" not in self._stubs: - self._stubs["create_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob", - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs["create_job"] - - @property - def list_jobs( - self, - ) -> Callable[[services.ListJobsRequest], Awaitable[services.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_jobs" not in self._stubs: - self._stubs["list_jobs"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs", - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs["list_jobs"] - - @property - def get_job(self) -> Callable[[services.GetJobRequest], Awaitable[resources.Job]]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. 
- - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_job" not in self._stubs: - self._stubs["get_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob", - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs["get_job"] - - @property - def delete_job( - self, - ) -> Callable[[services.DeleteJobRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "delete_job" not in self._stubs: - self._stubs["delete_job"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob", - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_job"] - - @property - def create_job_template( - self, - ) -> Callable[ - [services.CreateJobTemplateRequest], Awaitable[resources.JobTemplate] - ]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "create_job_template" not in self._stubs: - self._stubs["create_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate", - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs["create_job_template"] - - @property - def list_job_templates( - self, - ) -> Callable[ - [services.ListJobTemplatesRequest], Awaitable[services.ListJobTemplatesResponse] - ]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - Awaitable[~.ListJobTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "list_job_templates" not in self._stubs: - self._stubs["list_job_templates"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates", - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs["list_job_templates"] - - @property - def get_job_template( - self, - ) -> Callable[[services.GetJobTemplateRequest], Awaitable[resources.JobTemplate]]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if "get_job_template" not in self._stubs: - self._stubs["get_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate", - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs["get_job_template"] - - @property - def delete_job_template( - self, - ) -> Callable[[services.DeleteJobTemplateRequest], Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if "delete_job_template" not in self._stubs: - self._stubs["delete_job_template"] = self.grpc_channel.unary_unary( - "/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate", - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs["delete_job_template"] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ("TranscoderServiceGrpcAsyncIOTransport",) diff --git a/google/cloud/video/transcoder_v1beta1/types/__init__.py b/google/cloud/video/transcoder_v1beta1/types/__init__.py deleted file mode 100644 index b216dc5..0000000 --- a/google/cloud/video/transcoder_v1beta1/types/__init__.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .resources import ( - AdBreak, - AudioStream, - EditAtom, - ElementaryStream, - Encryption, - FailureDetail, - Input, - Job, - JobConfig, - JobTemplate, - Manifest, - MuxStream, - Output, - Overlay, - PreprocessingConfig, - Progress, - PubsubDestination, - SegmentSettings, - SpriteSheet, - TextStream, - VideoStream, -) -from .services import ( - CreateJobRequest, - CreateJobTemplateRequest, - DeleteJobRequest, - DeleteJobTemplateRequest, - GetJobRequest, - GetJobTemplateRequest, - ListJobsRequest, - ListJobsResponse, - ListJobTemplatesRequest, - ListJobTemplatesResponse, -) - -__all__ = ( - "AdBreak", - "AudioStream", - "EditAtom", - "ElementaryStream", - "Encryption", - "FailureDetail", - "Input", - "Job", - "JobConfig", - "JobTemplate", - "Manifest", - "MuxStream", - "Output", - "Overlay", - "PreprocessingConfig", - "Progress", - "PubsubDestination", - "SegmentSettings", - "SpriteSheet", - "TextStream", - "VideoStream", - "CreateJobRequest", - "CreateJobTemplateRequest", - "DeleteJobRequest", - "DeleteJobTemplateRequest", - "GetJobRequest", - "GetJobTemplateRequest", - "ListJobsRequest", - "ListJobsResponse", - "ListJobTemplatesRequest", - "ListJobTemplatesResponse", -) diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py deleted file mode 100644 index 41ac5c7..0000000 --- a/google/cloud/video/transcoder_v1beta1/types/resources.py +++ /dev/null @@ -1,1366 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package="google.cloud.video.transcoder.v1beta1", - manifest={ - "Job", - "JobTemplate", - "JobConfig", - "Input", - "Output", - "EditAtom", - "AdBreak", - "ElementaryStream", - "MuxStream", - "Manifest", - "PubsubDestination", - "SpriteSheet", - "Overlay", - "PreprocessingConfig", - "VideoStream", - "AudioStream", - "TextStream", - "SegmentSettings", - "Encryption", - "Progress", - "FailureDetail", - }, -) - - -class Job(proto.Message): - r"""Transcoding job resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the job. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - input_uri (str): - Input only. Specify the ``input_uri`` to populate empty - ``uri`` fields in each element of ``Job.config.inputs`` or - ``JobTemplate.config.inputs`` when using template. URI of - the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). - output_uri (str): - Input only. Specify the ``output_uri`` to populate an empty - ``Job.config.output.uri`` or - ``JobTemplate.config.output.uri`` when using template. URI - for the output file(s). For example, - ``gs://my-bucket/outputs/``. - template_id (str): - Input only. Specify the ``template_id`` to use for - populating ``Job.config``. The default is ``preset/web-hd``. 
- - Preset Transcoder templates: - - - ``preset/{preset_id}`` - - - User defined JobTemplate: ``{job_template_id}`` - - This field is a member of `oneof`_ ``job_config``. - config (google.cloud.video.transcoder_v1beta1.types.JobConfig): - The configuration for this job. - - This field is a member of `oneof`_ ``job_config``. - priority (int): - Specify the priority of the job. Enter a - value between 0 and 100, where 0 is the lowest - priority and 100 is the highest priority. The - default is 0. - origin_uri (google.cloud.video.transcoder_v1beta1.types.Job.OriginUri): - Output only. The origin URI. - - state (google.cloud.video.transcoder_v1beta1.types.Job.ProcessingState): - Output only. The current state of the job. - progress (google.cloud.video.transcoder_v1beta1.types.Progress): - Output only. Estimated fractional progress, from ``0`` to - ``1`` for each step. - - .. raw:: html - - - failure_reason (str): - Output only. A description of the reason for the failure. - This property is always present when ``state`` is - ``FAILED``. - failure_details (Sequence[google.cloud.video.transcoder_v1beta1.types.FailureDetail]): - Output only. List of failure details. This property may - contain additional information about the failure when - ``failure_reason`` is present. - - .. raw:: html - - - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - finished. - ttl_after_completion_days (int): - Job time to live value in days, which will be - effective after job completion. Job should be - deleted automatically after the given TTL. Enter - a value between 1 and 90. The default is 30. 
- """ - - class ProcessingState(proto.Enum): - r"""The current state of the job.""" - PROCESSING_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - SUCCEEDED = 3 - FAILED = 4 - - class OriginUri(proto.Message): - r"""The origin URI. - - Attributes: - hls (str): - HLS manifest URI per - https://tools.ietf.org/html/rfc8216#section-4.3.4. - If multiple HLS manifests are created, only the - first one is listed. - dash (str): - Dash manifest URI. If multiple Dash manifests - are created, only the first one is listed. - """ - - hls = proto.Field(proto.STRING, number=1,) - dash = proto.Field(proto.STRING, number=2,) - - name = proto.Field(proto.STRING, number=1,) - input_uri = proto.Field(proto.STRING, number=2,) - output_uri = proto.Field(proto.STRING, number=3,) - template_id = proto.Field(proto.STRING, number=4, oneof="job_config",) - config = proto.Field( - proto.MESSAGE, number=5, oneof="job_config", message="JobConfig", - ) - priority = proto.Field(proto.INT32, number=6,) - origin_uri = proto.Field(proto.MESSAGE, number=7, message=OriginUri,) - state = proto.Field(proto.ENUM, number=8, enum=ProcessingState,) - progress = proto.Field(proto.MESSAGE, number=9, message="Progress",) - failure_reason = proto.Field(proto.STRING, number=10,) - failure_details = proto.RepeatedField( - proto.MESSAGE, number=11, message="FailureDetail", - ) - create_time = proto.Field( - proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp,) - ttl_after_completion_days = proto.Field(proto.INT32, number=15,) - - -class JobTemplate(proto.Message): - r"""Transcoding job template resource. - - Attributes: - name (str): - The resource name of the job template. 
Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - config (google.cloud.video.transcoder_v1beta1.types.JobConfig): - The configuration for this template. - """ - - name = proto.Field(proto.STRING, number=1,) - config = proto.Field(proto.MESSAGE, number=2, message="JobConfig",) - - -class JobConfig(proto.Message): - r"""Job configuration - - Attributes: - inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.Input]): - List of input assets stored in Cloud Storage. - edit_list (Sequence[google.cloud.video.transcoder_v1beta1.types.EditAtom]): - List of ``Edit atom``\ s. Defines the ultimate timeline of - the resulting file or manifest. - elementary_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.ElementaryStream]): - List of elementary streams. - mux_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.MuxStream]): - List of multiplexing settings for output - streams. - manifests (Sequence[google.cloud.video.transcoder_v1beta1.types.Manifest]): - List of output manifests. - output (google.cloud.video.transcoder_v1beta1.types.Output): - Output configuration. - ad_breaks (Sequence[google.cloud.video.transcoder_v1beta1.types.AdBreak]): - List of ad breaks. Specifies where to insert - ad break tags in the output manifests. - pubsub_destination (google.cloud.video.transcoder_v1beta1.types.PubsubDestination): - Destination on Pub/Sub. - sprite_sheets (Sequence[google.cloud.video.transcoder_v1beta1.types.SpriteSheet]): - List of output sprite sheets. - overlays (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay]): - List of overlays on the output video, in - descending Z-order. 
- """ - - inputs = proto.RepeatedField(proto.MESSAGE, number=1, message="Input",) - edit_list = proto.RepeatedField(proto.MESSAGE, number=2, message="EditAtom",) - elementary_streams = proto.RepeatedField( - proto.MESSAGE, number=3, message="ElementaryStream", - ) - mux_streams = proto.RepeatedField(proto.MESSAGE, number=4, message="MuxStream",) - manifests = proto.RepeatedField(proto.MESSAGE, number=5, message="Manifest",) - output = proto.Field(proto.MESSAGE, number=6, message="Output",) - ad_breaks = proto.RepeatedField(proto.MESSAGE, number=7, message="AdBreak",) - pubsub_destination = proto.Field( - proto.MESSAGE, number=8, message="PubsubDestination", - ) - sprite_sheets = proto.RepeatedField(proto.MESSAGE, number=9, message="SpriteSheet",) - overlays = proto.RepeatedField(proto.MESSAGE, number=10, message="Overlay",) - - -class Input(proto.Message): - r"""Input asset. - - Attributes: - key (str): - A unique key for this input. Must be - specified when using advanced mapping and edit - lists. - uri (str): - URI of the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). If empty, the value will - be populated from ``Job.input_uri``. - preprocessing_config (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig): - Preprocessing configurations. - """ - - key = proto.Field(proto.STRING, number=1,) - uri = proto.Field(proto.STRING, number=2,) - preprocessing_config = proto.Field( - proto.MESSAGE, number=3, message="PreprocessingConfig", - ) - - -class Output(proto.Message): - r"""Location of output file(s) in a Cloud Storage bucket. - - Attributes: - uri (str): - URI for the output file(s). For example, - ``gs://my-bucket/outputs/``. If empty the value is populated - from ``Job.output_uri``. - """ - - uri = proto.Field(proto.STRING, number=1,) - - -class EditAtom(proto.Message): - r"""Edit atom. - - Attributes: - key (str): - A unique key for this atom. 
Must be specified - when using advanced mapping. - inputs (Sequence[str]): - List of ``Input.key``\ s identifying files that should be - used in this atom. The listed ``inputs`` must have the same - timeline. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds for the atom, relative to the input file - timeline. When ``end_time_offset`` is not specified, the - ``inputs`` are used until the end of the atom. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the atom, relative to the input - file timeline. The default is ``0s``. - """ - - key = proto.Field(proto.STRING, number=1,) - inputs = proto.RepeatedField(proto.STRING, number=2,) - end_time_offset = proto.Field( - proto.MESSAGE, number=3, message=duration_pb2.Duration, - ) - start_time_offset = proto.Field( - proto.MESSAGE, number=4, message=duration_pb2.Duration, - ) - - -class AdBreak(proto.Message): - r"""Ad break. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the ad break, relative to the - output file timeline. The default is ``0s``. - """ - - start_time_offset = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, - ) - - -class ElementaryStream(proto.Message): - r"""Encoding of an input file such as an audio, video, or text - track. Elementary streams must be packaged before - mapping and sharing between different output formats. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - A unique key for this elementary stream. - video_stream (google.cloud.video.transcoder_v1beta1.types.VideoStream): - Encoding of a video stream. 
- - This field is a member of `oneof`_ ``elementary_stream``. - audio_stream (google.cloud.video.transcoder_v1beta1.types.AudioStream): - Encoding of an audio stream. - - This field is a member of `oneof`_ ``elementary_stream``. - text_stream (google.cloud.video.transcoder_v1beta1.types.TextStream): - Encoding of a text stream. For example, - closed captions or subtitles. - - This field is a member of `oneof`_ ``elementary_stream``. - """ - - key = proto.Field(proto.STRING, number=4,) - video_stream = proto.Field( - proto.MESSAGE, number=1, oneof="elementary_stream", message="VideoStream", - ) - audio_stream = proto.Field( - proto.MESSAGE, number=2, oneof="elementary_stream", message="AudioStream", - ) - text_stream = proto.Field( - proto.MESSAGE, number=3, oneof="elementary_stream", message="TextStream", - ) - - -class MuxStream(proto.Message): - r"""Multiplexing settings for output stream. - - Attributes: - key (str): - A unique key for this multiplexed stream. HLS media - manifests will be named ``MuxStream.key`` with the - ``".m3u8"`` extension suffix. - file_name (str): - The name of the generated file. The default is - ``MuxStream.key`` with the extension suffix corresponding to - the ``MuxStream.container``. - - Individual segments also have an incremental 10-digit - zero-padded suffix starting from 0 before the extension, - such as ``"mux_stream0000000123.ts"``. - container (str): - The container format. The default is ``"mp4"`` - - Supported container formats: - - - 'ts' - - 'fmp4'- the corresponding file extension is ``".m4s"`` - - 'mp4' - - 'vtt' - elementary_streams (Sequence[str]): - List of ``ElementaryStream.key``\ s multiplexed in this - stream. - segment_settings (google.cloud.video.transcoder_v1beta1.types.SegmentSettings): - Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - encryption (google.cloud.video.transcoder_v1beta1.types.Encryption): - Encryption settings. 
- """ - - key = proto.Field(proto.STRING, number=1,) - file_name = proto.Field(proto.STRING, number=2,) - container = proto.Field(proto.STRING, number=3,) - elementary_streams = proto.RepeatedField(proto.STRING, number=4,) - segment_settings = proto.Field(proto.MESSAGE, number=5, message="SegmentSettings",) - encryption = proto.Field(proto.MESSAGE, number=6, message="Encryption",) - - -class Manifest(proto.Message): - r"""Manifest configuration. - - Attributes: - file_name (str): - The name of the generated file. The default is - ``"manifest"`` with the extension suffix corresponding to - the ``Manifest.type``. - type_ (google.cloud.video.transcoder_v1beta1.types.Manifest.ManifestType): - Required. Type of the manifest, can be "HLS" - or "DASH". - mux_streams (Sequence[str]): - Required. List of user given ``MuxStream.key``\ s that - should appear in this manifest. - - When ``Manifest.type`` is ``HLS``, a media manifest with - name ``MuxStream.key`` and ``.m3u8`` extension is generated - for each element of the ``Manifest.mux_streams``. - """ - - class ManifestType(proto.Enum): - r"""The manifest type can be either ``"HLS"`` or ``"DASH"``.""" - MANIFEST_TYPE_UNSPECIFIED = 0 - HLS = 1 - DASH = 2 - - file_name = proto.Field(proto.STRING, number=1,) - type_ = proto.Field(proto.ENUM, number=2, enum=ManifestType,) - mux_streams = proto.RepeatedField(proto.STRING, number=3,) - - -class PubsubDestination(proto.Message): - r"""A Pub/Sub destination. - - Attributes: - topic (str): - The name of the Pub/Sub topic to publish job completion - notification to. For example: - ``projects/{project}/topics/{topic}``. - """ - - topic = proto.Field(proto.STRING, number=1,) - - -class SpriteSheet(proto.Message): - r"""Sprite sheet configuration. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - format_ (str): - Format type. The default is ``"jpeg"``. - - Supported formats: - - - 'jpeg' - file_prefix (str): - Required. File name prefix for the generated sprite sheets. - - Each sprite sheet has an incremental 10-digit zero-padded - suffix starting from 0 before the extension, such as - ``"sprite_sheet0000000123.jpeg"``. - sprite_width_pixels (int): - Required. The width of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] - field or the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] - field, but not both (the API will automatically calculate - the missing field). - sprite_height_pixels (int): - Required. The height of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] - field or the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] - field, but not both (the API will automatically calculate - the missing field). - column_count (int): - The maximum number of sprites per row in a - sprite sheet. The default is 0, which indicates - no maximum limit. - row_count (int): - The maximum number of rows per sprite sheet. - When the sprite sheet is full, a new sprite - sheet is created. The default is 0, which - indicates no maximum limit. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds, relative to the output file timeline. - Determines the first sprite to pick. The default is ``0s``. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds, relative to the output file timeline. 
- When ``end_time_offset`` is not specified, the sprites are - generated until the end of the output file. - total_count (int): - Total number of sprites. Create the specified - number of sprites distributed evenly across the - timeline of the output media. The default is - 100. - - This field is a member of `oneof`_ ``extraction_strategy``. - interval (google.protobuf.duration_pb2.Duration): - Starting from ``0s``, create sprites at regular intervals. - Specify the interval value in seconds. - - This field is a member of `oneof`_ ``extraction_strategy``. - quality (int): - The quality of the generated sprite sheet. - Enter a value between 1 and 100, where 1 is the - lowest quality and 100 is the highest quality. - The default is 100. A high quality value - corresponds to a low image data compression - ratio. - """ - - format_ = proto.Field(proto.STRING, number=1,) - file_prefix = proto.Field(proto.STRING, number=2,) - sprite_width_pixels = proto.Field(proto.INT32, number=3,) - sprite_height_pixels = proto.Field(proto.INT32, number=4,) - column_count = proto.Field(proto.INT32, number=5,) - row_count = proto.Field(proto.INT32, number=6,) - start_time_offset = proto.Field( - proto.MESSAGE, number=7, message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, number=8, message=duration_pb2.Duration, - ) - total_count = proto.Field(proto.INT32, number=9, oneof="extraction_strategy",) - interval = proto.Field( - proto.MESSAGE, - number=10, - oneof="extraction_strategy", - message=duration_pb2.Duration, - ) - quality = proto.Field(proto.INT32, number=11,) - - -class Overlay(proto.Message): - r"""Overlay configuration. - - Attributes: - image (google.cloud.video.transcoder_v1beta1.types.Overlay.Image): - Image overlay. - animations (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay.Animation]): - List of Animations. The list should be - chronological, without any time overlap. 
- """ - - class FadeType(proto.Enum): - r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" - FADE_TYPE_UNSPECIFIED = 0 - FADE_IN = 1 - FADE_OUT = 2 - - class NormalizedCoordinate(proto.Message): - r"""2D normalized coordinates. Default: ``{0.0, 0.0}`` - - Attributes: - x (float): - Normalized x coordinate. - y (float): - Normalized y coordinate. - """ - - x = proto.Field(proto.DOUBLE, number=1,) - y = proto.Field(proto.DOUBLE, number=2,) - - class Image(proto.Message): - r"""Overlaid jpeg image. - - Attributes: - uri (str): - Required. URI of the JPEG image in Cloud Storage. For - example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only - supported image type. - resolution (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized image resolution, based on output video - resolution. Valid values: ``0.0``–``1.0``. To respect the - original image aspect ratio, set either ``x`` or ``y`` to - ``0.0``. To use the original image resolution, set both - ``x`` and ``y`` to ``0.0``. - alpha (float): - Target image opacity. Valid values are from ``1.0`` (solid, - default) to ``0.0`` (transparent), exclusive. Set this to a - value greater than ``0.0``. - """ - - uri = proto.Field(proto.STRING, number=1,) - resolution = proto.Field( - proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate", - ) - alpha = proto.Field(proto.DOUBLE, number=3,) - - class AnimationStatic(proto.Message): - r"""Display static overlay object. - - Attributes: - xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. 
- start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start displaying the overlay - object, in seconds. Default: 0 - """ - - xy = proto.Field( - proto.MESSAGE, number=1, message="Overlay.NormalizedCoordinate", - ) - start_time_offset = proto.Field( - proto.MESSAGE, number=2, message=duration_pb2.Duration, - ) - - class AnimationFade(proto.Message): - r"""Display overlay object with fade animation. - - Attributes: - fade_type (google.cloud.video.transcoder_v1beta1.types.Overlay.FadeType): - Required. Type of fade animation: ``FADE_IN`` or - ``FADE_OUT``. - xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start the fade animation, in - seconds. Default: 0 - end_time_offset (google.protobuf.duration_pb2.Duration): - The time to end the fade animation, in seconds. Default: - ``start_time_offset`` + 1s - """ - - fade_type = proto.Field(proto.ENUM, number=1, enum="Overlay.FadeType",) - xy = proto.Field( - proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate", - ) - start_time_offset = proto.Field( - proto.MESSAGE, number=3, message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, number=4, message=duration_pb2.Duration, - ) - - class AnimationEnd(proto.Message): - r"""End previous overlay animation from the video. Without - AnimationEnd, the overlay object will keep the state of previous - animation until the end of the video. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to end overlay object, in seconds. 
- Default: 0 - """ - - start_time_offset = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, - ) - - class Animation(proto.Message): - r"""Animation types. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - animation_static (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationStatic): - Display static overlay object. - - This field is a member of `oneof`_ ``animation_type``. - animation_fade (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationFade): - Display overlay object with fade animation. - - This field is a member of `oneof`_ ``animation_type``. - animation_end (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationEnd): - End previous animation. - - This field is a member of `oneof`_ ``animation_type``. - """ - - animation_static = proto.Field( - proto.MESSAGE, - number=1, - oneof="animation_type", - message="Overlay.AnimationStatic", - ) - animation_fade = proto.Field( - proto.MESSAGE, - number=2, - oneof="animation_type", - message="Overlay.AnimationFade", - ) - animation_end = proto.Field( - proto.MESSAGE, - number=3, - oneof="animation_type", - message="Overlay.AnimationEnd", - ) - - image = proto.Field(proto.MESSAGE, number=1, message=Image,) - animations = proto.RepeatedField(proto.MESSAGE, number=2, message=Animation,) - - -class PreprocessingConfig(proto.Message): - r"""Preprocessing configurations. - - Attributes: - color (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Color): - Color preprocessing configuration. - denoise (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Denoise): - Denoise preprocessing configuration. 
- deblock (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Deblock): - Deblock preprocessing configuration. - audio (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Audio): - Audio preprocessing configuration. - crop (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Crop): - Specify the video cropping configuration. - pad (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Pad): - Specify the video pad filter configuration. - """ - - class Color(proto.Message): - r"""Color preprocessing configuration. - - Attributes: - saturation (float): - Control color saturation of the video. Enter - a value between -1 and 1, where -1 is fully - desaturated and 1 is maximum saturation. 0 is no - change. The default is 0. - contrast (float): - Control black and white contrast of the - video. Enter a value between -1 and 1, where -1 - is minimum contrast and 1 is maximum contrast. 0 - is no change. The default is 0. - brightness (float): - Control brightness of the video. Enter a - value between -1 and 1, where -1 is minimum - brightness and 1 is maximum brightness. 0 is no - change. The default is 0. - """ - - saturation = proto.Field(proto.DOUBLE, number=1,) - contrast = proto.Field(proto.DOUBLE, number=2,) - brightness = proto.Field(proto.DOUBLE, number=3,) - - class Denoise(proto.Message): - r"""Denoise preprocessing configuration. - - Attributes: - strength (float): - Set strength of the denoise. Enter a value - between 0 and 1. The higher the value, the - smoother the image. 0 is no denoising. The - default is 0. - tune (str): - Set the denoiser mode. The default is ``"standard"``. - - Supported denoiser modes: - - - 'standard' - - 'grain' - """ - - strength = proto.Field(proto.DOUBLE, number=1,) - tune = proto.Field(proto.STRING, number=2,) - - class Deblock(proto.Message): - r"""Deblock preprocessing configuration. - - Attributes: - strength (float): - Set strength of the deblocker. 
Enter a value - between 0 and 1. The higher the value, the - stronger the block removal. 0 is no deblocking. - The default is 0. - enabled (bool): - Enable deblocker. The default is ``false``. - """ - - strength = proto.Field(proto.DOUBLE, number=1,) - enabled = proto.Field(proto.BOOL, number=2,) - - class Audio(proto.Message): - r"""Audio preprocessing configuration. - - Attributes: - lufs (float): - Specify audio loudness normalization in loudness units - relative to full scale (LUFS). Enter a value between -24 and - 0 (the default), where: - - - -24 is the Advanced Television Systems Committee (ATSC - A/85) standard - - -23 is the EU R128 broadcast standard - - -19 is the prior standard for online mono audio - - -18 is the ReplayGain standard - - -16 is the prior standard for stereo audio - - -14 is the new online audio standard recommended by - Spotify, as well as Amazon Echo - - 0 disables normalization - high_boost (bool): - Enable boosting high frequency components. The default is - ``false``. - low_boost (bool): - Enable boosting low frequency components. The default is - ``false``. - """ - - lufs = proto.Field(proto.DOUBLE, number=1,) - high_boost = proto.Field(proto.BOOL, number=2,) - low_boost = proto.Field(proto.BOOL, number=3,) - - class Crop(proto.Message): - r"""Video cropping configuration for the input video. The cropped - input video is scaled to match the output resolution. - - Attributes: - top_pixels (int): - The number of pixels to crop from the top. - The default is 0. - bottom_pixels (int): - The number of pixels to crop from the bottom. - The default is 0. - left_pixels (int): - The number of pixels to crop from the left. - The default is 0. - right_pixels (int): - The number of pixels to crop from the right. - The default is 0. 
- """ - - top_pixels = proto.Field(proto.INT32, number=1,) - bottom_pixels = proto.Field(proto.INT32, number=2,) - left_pixels = proto.Field(proto.INT32, number=3,) - right_pixels = proto.Field(proto.INT32, number=4,) - - class Pad(proto.Message): - r"""Pad filter configuration for the input video. The padded - input video is scaled after padding with black to match the - output resolution. - - Attributes: - top_pixels (int): - The number of pixels to add to the top. The - default is 0. - bottom_pixels (int): - The number of pixels to add to the bottom. - The default is 0. - left_pixels (int): - The number of pixels to add to the left. The - default is 0. - right_pixels (int): - The number of pixels to add to the right. The - default is 0. - """ - - top_pixels = proto.Field(proto.INT32, number=1,) - bottom_pixels = proto.Field(proto.INT32, number=2,) - left_pixels = proto.Field(proto.INT32, number=3,) - right_pixels = proto.Field(proto.INT32, number=4,) - - color = proto.Field(proto.MESSAGE, number=1, message=Color,) - denoise = proto.Field(proto.MESSAGE, number=2, message=Denoise,) - deblock = proto.Field(proto.MESSAGE, number=3, message=Deblock,) - audio = proto.Field(proto.MESSAGE, number=4, message=Audio,) - crop = proto.Field(proto.MESSAGE, number=5, message=Crop,) - pad = proto.Field(proto.MESSAGE, number=6, message=Pad,) - - -class VideoStream(proto.Message): - r"""Video stream resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - codec (str): - Codec type. The following codecs are supported: - - - ``h264`` (default) - - ``h265`` - - ``vp9`` - profile (str): - Enforces the specified codec profile. 
The following profiles - are supported: - - - ``baseline`` - - ``main`` - - ``high`` (default) - - The available options are FFmpeg-compatible. Note that - certain values for this field may cause the transcoder to - override other fields you set in the ``VideoStream`` - message. - tune (str): - Enforces the specified codec tune. The available options are - FFmpeg-compatible. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``VideoStream`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. The available options are FFmpeg-compatible. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``VideoStream`` message. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - pixel_format (str): - Pixel format to use. The default is ``"yuv420p"``. - - Supported pixel formats: - - - 'yuv420p' pixel format. - - 'yuv422p' pixel format. - - 'yuv444p' pixel format. - - 'yuv420p10' 10-bit HDR pixel format. - - 'yuv422p10' 10-bit HDR pixel format. - - 'yuv444p10' 10-bit HDR pixel format. - - 'yuv420p12' 12-bit HDR pixel format. - - 'yuv422p12' 12-bit HDR pixel format. - - 'yuv444p12' 12-bit HDR pixel format. - bitrate_bps (int): - Required. The video bitrate in bits per - second. The minimum value is 1,000. The maximum - value for H264/H265 is 800,000,000. The maximum - value for VP9 is 480,000,000. - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``"vbr"``. 
- - Supported rate control modes: - - - 'vbr' - variable bitrate - - 'crf' - constant rate factor - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be - ``"vbr"``. The default is ``false``. - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. - Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. - allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - - This field is a member of `oneof`_ ``gop_mode``. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``"3s"``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - - This field is a member of `oneof`_ ``gop_mode``. - entropy_coder (str): - The entropy coder to use. The default is ``"cabac"``. - - Supported entropy coders: - - - 'cavlc' - - 'cabac' - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - frame_rate (float): - Required. 
The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculate frame - rate `__ - for more information. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. - """ - - codec = proto.Field(proto.STRING, number=1,) - profile = proto.Field(proto.STRING, number=2,) - tune = proto.Field(proto.STRING, number=3,) - preset = proto.Field(proto.STRING, number=4,) - height_pixels = proto.Field(proto.INT32, number=5,) - width_pixels = proto.Field(proto.INT32, number=6,) - pixel_format = proto.Field(proto.STRING, number=7,) - bitrate_bps = proto.Field(proto.INT32, number=8,) - rate_control_mode = proto.Field(proto.STRING, number=9,) - enable_two_pass = proto.Field(proto.BOOL, number=10,) - crf_level = proto.Field(proto.INT32, number=11,) - vbv_size_bits = proto.Field(proto.INT32, number=12,) - vbv_fullness_bits = proto.Field(proto.INT32, number=13,) - allow_open_gop = proto.Field(proto.BOOL, number=14,) - gop_frame_count = proto.Field(proto.INT32, number=15, oneof="gop_mode",) - gop_duration = proto.Field( - proto.MESSAGE, number=16, oneof="gop_mode", message=duration_pb2.Duration, - ) - entropy_coder = proto.Field(proto.STRING, number=17,) - b_pyramid = proto.Field(proto.BOOL, number=18,) - b_frame_count = proto.Field(proto.INT32, number=19,) - frame_rate = proto.Field(proto.DOUBLE, number=20,) - aq_strength = proto.Field(proto.DOUBLE, number=21,) - - -class AudioStream(proto.Message): - r"""Audio stream resource. - - Attributes: - codec (str): - The codec for this audio stream. The default is ``"aac"``. 
- - Supported audio codecs: - - - 'aac' - - 'aac-he' - - 'aac-he-v2' - - 'mp3' - - 'ac3' - - 'eac3' - bitrate_bps (int): - Required. Audio bitrate in bits per second. - Must be between 1 and 10,000,000. - channel_count (int): - Number of audio channels. Must be between 1 - and 6. The default is 2. - channel_layout (Sequence[str]): - A list of channel names specifying layout of the audio - channels. This only affects the metadata embedded in the - container headers, if supported by the specified format. The - default is ``["fl", "fr"]``. - - Supported channel names: - - - 'fl' - Front left channel - - 'fr' - Front right channel - - 'sl' - Side left channel - - 'sr' - Side right channel - - 'fc' - Front center channel - - 'lfe' - Low frequency - mapping_ (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom]): - The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - sample_rate_hertz (int): - The audio sample rate in Hertz. The default - is 48000 Hertz. - """ - - class AudioAtom(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - - Attributes: - key (str): - Required. The ``EditAtom.key`` that references the atom with - audio inputs in the ``Job.edit_list``. - channels (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel]): - List of ``Channel``\ s for this audio stream. for in-depth - explanation. - """ - - class AudioChannel(proto.Message): - r"""The audio channel. - - Attributes: - inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel.AudioChannelInput]): - List of ``Job.inputs`` for this audio channel. - """ - - class AudioChannelInput(proto.Message): - r"""Identifies which input file, track, and channel should be - used. - - Attributes: - key (str): - Required. The ``Input.key`` that identifies the input file. - track (int): - Required. The zero-based index of the track - in the input file. 
- channel (int): - Required. The zero-based index of the channel - in the input file. - gain_db (float): - Audio volume control in dB. Negative values - decrease volume, positive values increase. The - default is 0. - """ - - key = proto.Field(proto.STRING, number=1,) - track = proto.Field(proto.INT32, number=2,) - channel = proto.Field(proto.INT32, number=3,) - gain_db = proto.Field(proto.DOUBLE, number=4,) - - inputs = proto.RepeatedField( - proto.MESSAGE, - number=2, - message="AudioStream.AudioAtom.AudioChannel.AudioChannelInput", - ) - - key = proto.Field(proto.STRING, number=1,) - channels = proto.RepeatedField( - proto.MESSAGE, number=2, message="AudioStream.AudioAtom.AudioChannel", - ) - - codec = proto.Field(proto.STRING, number=1,) - bitrate_bps = proto.Field(proto.INT32, number=2,) - channel_count = proto.Field(proto.INT32, number=3,) - channel_layout = proto.RepeatedField(proto.STRING, number=4,) - mapping_ = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioAtom,) - sample_rate_hertz = proto.Field(proto.INT32, number=6,) - - -class TextStream(proto.Message): - r"""Encoding of a text stream. For example, closed captions or - subtitles. - - Attributes: - codec (str): - The codec for this text stream. The default is ``"webvtt"``. - - Supported text codecs: - - - 'srt' - - 'ttml' - - 'cea608' - - 'cea708' - - 'webvtt' - language_code (str): - Required. The BCP-47 language code, such as ``"en-US"`` or - ``"sr-Latn"``. For more information, see - https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. - mapping_ (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom]): - The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - """ - - class TextAtom(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - - Attributes: - key (str): - Required. The ``EditAtom.key`` that references atom with - text inputs in the ``Job.edit_list``. 
- inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom.TextInput]): - List of ``Job.inputs`` that should be embedded in this atom. - Only one input is supported. - """ - - class TextInput(proto.Message): - r"""Identifies which input file and track should be used. - - Attributes: - key (str): - Required. The ``Input.key`` that identifies the input file. - track (int): - Required. The zero-based index of the track - in the input file. - """ - - key = proto.Field(proto.STRING, number=1,) - track = proto.Field(proto.INT32, number=2,) - - key = proto.Field(proto.STRING, number=1,) - inputs = proto.RepeatedField( - proto.MESSAGE, number=2, message="TextStream.TextAtom.TextInput", - ) - - codec = proto.Field(proto.STRING, number=1,) - language_code = proto.Field(proto.STRING, number=2,) - mapping_ = proto.RepeatedField(proto.MESSAGE, number=3, message=TextAtom,) - - -class SegmentSettings(proto.Message): - r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - - Attributes: - segment_duration (google.protobuf.duration_pb2.Duration): - Duration of the segments in seconds. The default is - ``"6.0s"``. Note that ``segmentDuration`` must be greater - than or equal to ```gopDuration`` <#videostream>`__, and - ``segmentDuration`` must be divisible by - ```gopDuration`` <#videostream>`__. - individual_segments (bool): - Required. Create an individual segment file. The default is - ``false``. - """ - - segment_duration = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, - ) - individual_segments = proto.Field(proto.BOOL, number=3,) - - -class Encryption(proto.Message): - r"""Encryption settings. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - Required. 128 bit encryption key represented - as lowercase hexadecimal digits. - iv (str): - Required. 128 bit Initialization Vector (IV) - represented as lowercase hexadecimal digits. - aes_128 (google.cloud.video.transcoder_v1beta1.types.Encryption.Aes128Encryption): - Configuration for AES-128 encryption. - - This field is a member of `oneof`_ ``encryption_mode``. - sample_aes (google.cloud.video.transcoder_v1beta1.types.Encryption.SampleAesEncryption): - Configuration for SAMPLE-AES encryption. - - This field is a member of `oneof`_ ``encryption_mode``. - mpeg_cenc (google.cloud.video.transcoder_v1beta1.types.Encryption.MpegCommonEncryption): - Configuration for MPEG Common Encryption - (MPEG-CENC). - - This field is a member of `oneof`_ ``encryption_mode``. - """ - - class Aes128Encryption(proto.Message): - r"""Configuration for AES-128 encryption. - - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. - """ - - key_uri = proto.Field(proto.STRING, number=1,) - - class SampleAesEncryption(proto.Message): - r"""Configuration for SAMPLE-AES encryption. - - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. - """ - - key_uri = proto.Field(proto.STRING, number=1,) - - class MpegCommonEncryption(proto.Message): - r"""Configuration for MPEG Common Encryption (MPEG-CENC). - - Attributes: - key_id (str): - Required. 128 bit Key ID represented as - lowercase hexadecimal digits for use with common - encryption. - scheme (str): - Required. Specify the encryption scheme. 
- Supported encryption schemes: - - 'cenc' - - 'cbcs' - """ - - key_id = proto.Field(proto.STRING, number=1,) - scheme = proto.Field(proto.STRING, number=2,) - - key = proto.Field(proto.STRING, number=1,) - iv = proto.Field(proto.STRING, number=2,) - aes_128 = proto.Field( - proto.MESSAGE, number=3, oneof="encryption_mode", message=Aes128Encryption, - ) - sample_aes = proto.Field( - proto.MESSAGE, number=4, oneof="encryption_mode", message=SampleAesEncryption, - ) - mpeg_cenc = proto.Field( - proto.MESSAGE, number=5, oneof="encryption_mode", message=MpegCommonEncryption, - ) - - -class Progress(proto.Message): - r"""Estimated fractional progress for each step, from ``0`` to ``1``. - - Attributes: - analyzed (float): - Estimated fractional progress for ``analyzing`` step. - encoded (float): - Estimated fractional progress for ``encoding`` step. - uploaded (float): - Estimated fractional progress for ``uploading`` step. - notified (float): - Estimated fractional progress for ``notifying`` step. - """ - - analyzed = proto.Field(proto.DOUBLE, number=1,) - encoded = proto.Field(proto.DOUBLE, number=2,) - uploaded = proto.Field(proto.DOUBLE, number=3,) - notified = proto.Field(proto.DOUBLE, number=4,) - - -class FailureDetail(proto.Message): - r"""Additional information about the reasons for the failure. - - Attributes: - description (str): - A description of the failure. - """ - - description = proto.Field(proto.STRING, number=1,) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/video/transcoder_v1beta1/types/services.py b/google/cloud/video/transcoder_v1beta1/types/services.py deleted file mode 100644 index fe23b03..0000000 --- a/google/cloud/video/transcoder_v1beta1/types/services.py +++ /dev/null @@ -1,206 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources - - -__protobuf__ = proto.module( - package="google.cloud.video.transcoder.v1beta1", - manifest={ - "CreateJobRequest", - "ListJobsRequest", - "GetJobRequest", - "DeleteJobRequest", - "ListJobsResponse", - "CreateJobTemplateRequest", - "ListJobTemplatesRequest", - "GetJobTemplateRequest", - "DeleteJobTemplateRequest", - "ListJobTemplatesResponse", - }, -) - - -class CreateJobRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJob``. - - Attributes: - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - job (google.cloud.video.transcoder_v1beta1.types.Job): - Required. Parameters for creating transcoding - job. - """ - - parent = proto.Field(proto.STRING, number=1,) - job = proto.Field(proto.MESSAGE, number=2, message=resources.Job,) - - -class ListJobsRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobs``. The parent - location from which to retrieve the collection of jobs. - - Attributes: - parent (str): - Required. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. 
- """ - - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - - -class GetJobRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJob``. - - Attributes: - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field(proto.STRING, number=1,) - - -class DeleteJobRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJob``. - - Attributes: - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field(proto.STRING, number=1,) - - -class ListJobsResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobs``. - - Attributes: - jobs (Sequence[google.cloud.video.transcoder_v1beta1.types.Job]): - List of jobs in the specified region. - next_page_token (str): - The pagination token. - """ - - @property - def raw_page(self): - return self - - jobs = proto.RepeatedField(proto.MESSAGE, number=1, message=resources.Job,) - next_page_token = proto.Field(proto.STRING, number=2,) - - -class CreateJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJobTemplate``. - - Attributes: - parent (str): - Required. The parent location to create this job template. - Format: ``projects/{project}/locations/{location}`` - job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): - Required. Parameters for creating job - template. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's resource - name. - - This value should be 4-63 characters, and valid characters - must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. 
- """ - - parent = proto.Field(proto.STRING, number=1,) - job_template = proto.Field(proto.MESSAGE, number=2, message=resources.JobTemplate,) - job_template_id = proto.Field(proto.STRING, number=3,) - - -class ListJobTemplatesRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobTemplates``. - - Attributes: - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - """ - - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - - -class GetJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJobTemplate``. - - Attributes: - name (str): - Required. The name of the job template to retrieve. Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field(proto.STRING, number=1,) - - -class DeleteJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJobTemplate``. - - Attributes: - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field(proto.STRING, number=1,) - - -class ListJobTemplatesResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobTemplates``. - - Attributes: - job_templates (Sequence[google.cloud.video.transcoder_v1beta1.types.JobTemplate]): - List of job templates in the specified - region. - next_page_token (str): - The pagination token. 
- """ - - @property - def raw_page(self): - return self - - job_templates = proto.RepeatedField( - proto.MESSAGE, number=1, message=resources.JobTemplate, - ) - next_page_token = proto.Field(proto.STRING, number=2,) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/samples/generated_samples/snippet_metadata_transcoder_v1.json b/samples/generated_samples/snippet_metadata_transcoder_v1.json new file mode 100644 index 0000000..68940f6 --- /dev/null +++ b/samples/generated_samples/snippet_metadata_transcoder_v1.json @@ -0,0 +1,708 @@ +{ + "snippets": [ + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "CreateJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_create_job_template_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "CreateJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_create_job_template_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 39, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 42, + "start": 40, + "type": "REQUEST_EXECUTION" + }, + { + "end": 
46, + "start": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "CreateJob" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_create_job_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_CreateJob_async", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "CreateJob" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_create_job_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_CreateJob_sync", + "segments": [ + { + "end": 48, + "start": 27, + "type": "FULL" + }, + { + "end": 48, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 42, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 45, + "start": 43, + "type": "REQUEST_EXECUTION" + }, + { + "end": 49, + "start": 46, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "DeleteJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + 
"start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "DeleteJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_sync", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "DeleteJob" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_delete_job_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_async", + "segments": [ + { + "end": 42, + "start": 27, + "type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "DeleteJob" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_delete_job_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_sync", + "segments": [ + { + "end": 42, + "start": 27, + 
"type": "FULL" + }, + { + "end": 42, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 43, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "GetJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_get_job_template_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "GetJobTemplate" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_get_job_template_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "GetJob" + } + }, + "file": 
"transcoder_generated_transcoder_v1_transcoder_service_get_job_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_GetJob_async", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "GetJob" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_get_job_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_GetJob_sync", + "segments": [ + { + "end": 44, + "start": 27, + "type": "FULL" + }, + { + "end": 44, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 45, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "ListJobTemplates" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": 
"RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "ListJobTemplates" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "async": true, + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "ListJobs" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_list_jobs_async.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_ListJobs_async", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + }, + { + "clientMethod": { + "method": { + "service": { + "shortName": "TranscoderService" + }, + "shortName": "ListJobs" + } + }, + "file": "transcoder_generated_transcoder_v1_transcoder_service_list_jobs_sync.py", + "regionTag": "transcoder_generated_transcoder_v1_TranscoderService_ListJobs_sync", + "segments": [ + { + "end": 45, + "start": 27, + "type": "FULL" + }, + { + "end": 45, + "start": 27, + "type": "SHORT" + }, + { + "end": 33, + "start": 31, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 
38, + "start": 34, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 41, + "start": 39, + "type": "REQUEST_EXECUTION" + }, + { + "end": 46, + "start": 42, + "type": "RESPONSE_HANDLING" + } + ] + } + ] +} diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_async.py new file mode 100644 index 0000000..13dbbf6 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_async.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_CreateJob_async] +from google.cloud.video import transcoder_v1 + + +async def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_CreateJob_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_sync.py new file mode 100644 index 0000000..537d2f2 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_sync.py @@ -0,0 +1,49 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_CreateJob_sync] +from google.cloud.video import transcoder_v1 + + +def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_CreateJob_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_async.py new file mode 100644 index 0000000..033dff6 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_async] +from google.cloud.video import transcoder_v1 + + +async def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = await client.create_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_sync.py new file mode 100644 index 0000000..d2899a3 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_create_job_template_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for CreateJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_sync] +from google.cloud.video import transcoder_v1 + + +def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = client.create_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_CreateJobTemplate_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_async.py new file mode 100644 index 0000000..e55591a --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! 
+# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_async] +from google.cloud.video import transcoder_v1 + + +async def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + +# [END transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_sync.py new file mode 100644 index 0000000..78eae73 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_sync] +from google.cloud.video import transcoder_v1 + + +def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + +# [END transcoder_generated_transcoder_v1_TranscoderService_DeleteJob_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_async.py new file mode 100644 index 0000000..18a7fd4 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_async.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_async] +from google.cloud.video import transcoder_v1 + + +async def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_template(request=request) + + +# [END transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_sync.py new file mode 100644 index 0000000..b6f9cb2 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_delete_job_template_sync.py @@ -0,0 +1,43 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_sync] +from google.cloud.video import transcoder_v1 + + +def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_job_template(request=request) + + +# [END transcoder_generated_transcoder_v1_TranscoderService_DeleteJobTemplate_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_async.py new file mode 100644 index 0000000..e381d4f --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_GetJob_async] +from google.cloud.video import transcoder_v1 + + +async def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_GetJob_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_sync.py new file mode 100644 index 0000000..b828c2f --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_GetJob_sync] +from google.cloud.video import transcoder_v1 + + +def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_GetJob_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_async.py new file mode 100644 index 0000000..b6f6834 --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_async.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_async] +from google.cloud.video import transcoder_v1 + + +async def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_sync.py new file mode 100644 index 0000000..29df7fe --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_get_job_template_sync.py @@ -0,0 +1,45 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_sync] +from google.cloud.video import transcoder_v1 + + +def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_GetJobTemplate_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_async.py new file mode 100644 index 0000000..387bcaa --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_async] +from google.cloud.video import transcoder_v1 + + +async def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_sync.py new file mode 100644 index 0000000..80683dd --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_job_templates_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_sync] +from google.cloud.video import transcoder_v1 + + +def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_ListJobTemplates_sync] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_async.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_async.py new file mode 100644 index 0000000..656a52e --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_async.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. 
+# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_ListJobs_async] +from google.cloud.video import transcoder_v1 + + +async def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_ListJobs_async] diff --git a/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_sync.py b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_sync.py new file mode 100644 index 0000000..352587f --- /dev/null +++ b/samples/generated_samples/transcoder_generated_transcoder_v1_transcoder_service_list_jobs_sync.py @@ -0,0 +1,46 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. 
+ +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_generated_transcoder_v1_TranscoderService_ListJobs_sync] +from google.cloud.video import transcoder_v1 + + +def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END transcoder_generated_transcoder_v1_TranscoderService_ListJobs_sync] diff --git a/samples/snippets/create_job_with_periodic_images_spritesheet.py b/samples/snippets/create_job_with_periodic_images_spritesheet.py index 5028a27..95621e2 100644 --- a/samples/snippets/create_job_with_periodic_images_spritesheet.py +++ b/samples/snippets/create_job_with_periodic_images_spritesheet.py @@ -50,7 +50,7 @@ def create_job_with_periodic_images_spritesheet( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc. - # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1beta1/JobConfig. + # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig. elementary_streams=[ # This section defines the output video stream. 
transcoder_v1.types.ElementaryStream( diff --git a/samples/snippets/create_job_with_set_number_images_spritesheet.py b/samples/snippets/create_job_with_set_number_images_spritesheet.py index d416eec..b25c872 100644 --- a/samples/snippets/create_job_with_set_number_images_spritesheet.py +++ b/samples/snippets/create_job_with_set_number_images_spritesheet.py @@ -49,7 +49,7 @@ def create_job_with_set_number_images_spritesheet( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc. - # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1beta1/JobConfig. + # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig. elementary_streams=[ # This section defines the output video stream. transcoder_v1.types.ElementaryStream( diff --git a/scripts/fixup_transcoder_v1beta1_keywords.py b/scripts/fixup_transcoder_v1beta1_keywords.py deleted file mode 100644 index 1d0bea5..0000000 --- a/scripts/fixup_transcoder_v1beta1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class transcoderCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_job': ('parent', 'job', ), - 'create_job_template': ('parent', 'job_template', 'job_template_id', ), - 'delete_job': ('name', ), - 'delete_job_template': ('name', ), - 'get_job': ('name', ), - 'get_job_template': ('name', ), - 'list_jobs': ('parent', 'page_size', 'page_token', ), - 'list_job_templates': ('parent', 'page_size', 'page_token', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. 
- return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=transcoderCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the transcoder client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. 
- -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/tests/unit/gapic/transcoder_v1beta1/__init__.py b/tests/unit/gapic/transcoder_v1beta1/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/tests/unit/gapic/transcoder_v1beta1/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py deleted file mode 100644 index cd2c1b7..0000000 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ /dev/null @@ -1,3307 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import ( - TranscoderServiceAsyncClient, -) -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import ( - TranscoderServiceClient, -) -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return ( - "foo.googleapis.com" - if ("localhost" in client.DEFAULT_ENDPOINT) - else client.DEFAULT_ENDPOINT - ) - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None - assert ( - TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) - == api_mtls_endpoint - ) - assert ( - TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) - == api_mtls_endpoint - ) - assert ( - TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) - == sandbox_mtls_endpoint - ) - assert ( - TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) - == non_googleapi - ) - - -@pytest.mark.parametrize( - "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,] -) -def test_transcoder_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_info" - ) as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == "transcoder.googleapis.com:443" - - -@pytest.mark.parametrize( - "transport_class,transport_name", - [ - (transports.TranscoderServiceGrpcTransport, "grpc"), - (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), - ], -) -def test_transcoder_service_client_service_account_always_use_jwt( - transport_class, transport_name -): - with 
mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object( - service_account.Credentials, "with_always_use_jwt_access", create=True - ) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize( - "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,] -) -def test_transcoder_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object( - service_account.Credentials, "from_service_account_file" - ) as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == "transcoder.googleapis.com:443" - - -def test_transcoder_service_client_get_transport_class(): - transport = TranscoderServiceClient.get_transport_class() - available_transports = [ - transports.TranscoderServiceGrpcTransport, - ] - assert transport in available_transports - - transport = TranscoderServiceClient.get_transport_class("grpc") - assert transport == transports.TranscoderServiceGrpcTransport - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) 
-@mock.patch.object( - TranscoderServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceClient), -) -@mock.patch.object( - TranscoderServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceAsyncClient), -) -def test_transcoder_service_client_client_options( - client_class, transport_class, transport_name -): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TranscoderServiceClient, "get_transport_class") as gtc: - transport = transport_class(credentials=ga_credentials.AnonymousCredentials()) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TranscoderServiceClient, "get_transport_class") as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"} - ): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,use_client_cert_env", - [ - ( - TranscoderServiceClient, - transports.TranscoderServiceGrpcTransport, - "grpc", - "true", - ), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "true", - ), - ( - TranscoderServiceClient, - transports.TranscoderServiceGrpcTransport, - "grpc", - "false", - ), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - "false", - ), - ], -) -@mock.patch.object( - TranscoderServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceClient), -) -@mock.patch.object( - TranscoderServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceAsyncClient), -) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_transcoder_service_client_mtls_env_auto( - client_class, transport_class, transport_name, use_client_cert_env -): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. 
Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - options = client_options.ClientOptions( - client_cert_source=client_cert_source_callback - ) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=client_cert_source_callback, - ): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert are not provided. 
- with mock.patch.dict( - os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env} - ): - with mock.patch.object(transport_class, "__init__") as patched: - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize( - "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient] -) -@mock.patch.object( - TranscoderServiceClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceClient), -) -@mock.patch.object( - TranscoderServiceAsyncClient, - "DEFAULT_ENDPOINT", - modify_default_endpoint(TranscoderServiceAsyncClient), -) -def test_transcoder_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions( - client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint - ) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source( - options - ) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=False, - ): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch( - "google.auth.transport.mtls.has_default_client_cert_source", - return_value=True, - ): - with mock.patch( - "google.auth.transport.mtls.default_client_cert_source", - return_value=mock_client_cert_source, - ): - ( - api_endpoint, - cert_source, - ) = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name", - [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - ), - ], -) -def test_transcoder_service_client_client_options_scopes( - client_class, transport_class, transport_name -): - # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - TranscoderServiceClient, - transports.TranscoderServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_transcoder_service_client_client_options_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_transcoder_service_client_client_options_from_dict(): - with mock.patch( - "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__" - ) as grpc_transport: - grpc_transport.return_value = None - client = TranscoderServiceClient( - client_options={"api_endpoint": "squid.clam.whelk"} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize( - "client_class,transport_class,transport_name,grpc_helpers", - [ - ( - TranscoderServiceClient, - transports.TranscoderServiceGrpcTransport, - "grpc", - grpc_helpers, - ), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - "grpc_asyncio", - grpc_helpers_async, - ), - ], -) -def test_transcoder_service_client_create_channel_credentials_file( - client_class, transport_class, transport_name, grpc_helpers -): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions(credentials_file="credentials.json") - - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # test that the credentials from file are saved and used as the credentials. - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=None, - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) -def test_create_job(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name="name_value", - input_uri="input_uri_value", - output_uri="output_uri_value", - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason="failure_reason_value", - ttl_after_completion_days=2670, - template_id="template_id_value", - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == "name_value" - assert response.input_uri == "input_uri_value" - assert response.output_uri == "output_uri_value" - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == "failure_reason_value" - assert response.ttl_after_completion_days == 2670 - - -def test_create_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_job), "__call__") as call: - client.create_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - -@pytest.mark.asyncio -async def test_create_job_async( - transport: str = "grpc_asyncio", request_type=services.CreateJobRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Job( - name="name_value", - input_uri="input_uri_value", - output_uri="output_uri_value", - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason="failure_reason_value", - ttl_after_completion_days=2670, - ) - ) - response = await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Job) - assert response.name == "name_value" - assert response.input_uri == "input_uri_value" - assert response.output_uri == "output_uri_value" - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == "failure_reason_value" - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - - -def test_create_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - call.return_value = resources.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.create_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_create_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job( - parent="parent_value", job=resources.Job(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].job - mock_val = resources.Job(name="name_value") - assert arg == mock_val - - -def test_create_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.create_job( - services.CreateJobRequest(), - parent="parent_value", - job=resources.Job(name="name_value"), - ) - - -@pytest.mark.asyncio -async def test_create_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.create_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job( - parent="parent_value", job=resources.Job(name="name_value"), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].job - mock_val = resources.Job(name="name_value") - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_job( - services.CreateJobRequest(), - parent="parent_value", - job=resources.Job(name="name_value"), - ) - - -@pytest.mark.parametrize("request_type", [services.ListJobsRequest, dict,]) -def test_list_jobs(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse( - next_page_token="next_page_token_value", - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - -@pytest.mark.asyncio -async def test_list_jobs_async( - transport: str = "grpc_asyncio", request_type=services.ListJobsRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobsResponse(next_page_token="next_page_token_value",) - ) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = services.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobsResponse() - ) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_list_jobs_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_jobs(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_jobs_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - services.ListJobsRequest(), parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobsResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_jobs( - services.ListJobsRequest(), parent="parent_value", - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], - next_page_token="abc", - ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.Job) for i in results) - - -def test_list_jobs_pages(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], - next_page_token="abc", - ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], - next_page_token="abc", - ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Job) for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], - next_page_token="abc", - ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_jobs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [services.GetJobRequest, dict,]) -def test_get_job(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name="name_value", - input_uri="input_uri_value", - output_uri="output_uri_value", - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason="failure_reason_value", - ttl_after_completion_days=2670, - template_id="template_id_value", - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Job) - assert response.name == "name_value" - assert response.input_uri == "input_uri_value" - assert response.output_uri == "output_uri_value" - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == "failure_reason_value" - assert response.ttl_after_completion_days == 2670 - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - client.get_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - -@pytest.mark.asyncio -async def test_get_job_async( - transport: str = "grpc_asyncio", request_type=services.GetJobRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.Job( - name="name_value", - input_uri="input_uri_value", - output_uri="output_uri_value", - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason="failure_reason_value", - ttl_after_completion_days=2670, - ) - ) - response = await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == "name_value" - assert response.input_uri == "input_uri_value" - assert response.output_uri == "output_uri_value" - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == "failure_reason_value" - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - - -def test_get_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - call.return_value = resources.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.get_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_get_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - services.GetJobRequest(), name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job( - services.GetJobRequest(), name="name_value", - ) - - -@pytest.mark.parametrize("request_type", [services.DeleteJobRequest, dict,]) -def test_delete_job(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - client.delete_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - -@pytest.mark.asyncio -async def test_delete_job_async( - transport: str = "grpc_asyncio", request_type=services.DeleteJobRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - - -def test_delete_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - call.return_value = None - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_delete_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job( - services.DeleteJobRequest(), name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.delete_job), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job( - services.DeleteJobRequest(), name="name_value", - ) - - -@pytest.mark.parametrize("request_type", [services.CreateJobTemplateRequest, dict,]) -def test_create_job_template(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate(name="name_value",) - response = client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == "name_value" - - -def test_create_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - client.create_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_create_job_template_async( - transport: str = "grpc_asyncio", request_type=services.CreateJobTemplateRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate(name="name_value",) - ) - response = await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_create_job_template_async_from_dict(): - await test_create_job_template_async(request_type=dict) - - -def test_create_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - call.return_value = resources.JobTemplate() - client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_create_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate() - ) - await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_create_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_template( - parent="parent_value", - job_template=resources.JobTemplate(name="name_value"), - job_template_id="job_template_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].job_template - mock_val = resources.JobTemplate(name="name_value") - assert arg == mock_val - arg = args[0].job_template_id - mock_val = "job_template_id_value" - assert arg == mock_val - - -def test_create_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_template( - services.CreateJobTemplateRequest(), - parent="parent_value", - job_template=resources.JobTemplate(name="name_value"), - job_template_id="job_template_id_value", - ) - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_job_template( - parent="parent_value", - job_template=resources.JobTemplate(name="name_value"), - job_template_id="job_template_id_value", - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - arg = args[0].job_template - mock_val = resources.JobTemplate(name="name_value") - assert arg == mock_val - arg = args[0].job_template_id - mock_val = "job_template_id_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_template( - services.CreateJobTemplateRequest(), - parent="parent_value", - job_template=resources.JobTemplate(name="name_value"), - job_template_id="job_template_id_value", - ) - - -@pytest.mark.parametrize("request_type", [services.ListJobTemplatesRequest, dict,]) -def test_list_job_templates(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = services.ListJobTemplatesResponse( - next_page_token="next_page_token_value", - ) - response = client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesPager) - assert response.next_page_token == "next_page_token_value" - - -def test_list_job_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - client.list_job_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - -@pytest.mark.asyncio -async def test_list_job_templates_async( - transport: str = "grpc_asyncio", request_type=services.ListJobTemplatesRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Designate an appropriate return value for the call. 
- call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobTemplatesResponse(next_page_token="next_page_token_value",) - ) - response = await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesAsyncPager) - assert response.next_page_token == "next_page_token_value" - - -@pytest.mark.asyncio -async def test_list_job_templates_async_from_dict(): - await test_list_job_templates_async(request_type=dict) - - -def test_list_job_templates_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - call.return_value = services.ListJobTemplatesResponse() - client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_list_job_templates_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = services.ListJobTemplatesRequest() - - request.parent = "parent/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobTemplatesResponse() - ) - await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] - - -def test_list_job_templates_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_templates(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -def test_list_job_templates_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_job_templates( - services.ListJobTemplatesRequest(), parent="parent_value", - ) - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - services.ListJobTemplatesResponse() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_templates(parent="parent_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = "parent_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_templates( - services.ListJobTemplatesRequest(), parent="parent_value", - ) - - -def test_list_job_templates_pager(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token="abc", - ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", - ), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata((("parent", ""),)), - ) - pager = client.list_job_templates(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.JobTemplate) for i in results) - - -def test_list_job_templates_pages(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), "__call__" - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token="abc", - ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", - ), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], - ), - RuntimeError, - ) - pages = list(client.list_job_templates(request={}).pages) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.asyncio -async def test_list_job_templates_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token="abc", - ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", - ), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], - ), - RuntimeError, - ) - async_pager = await client.list_job_templates(request={},) - assert async_pager.next_page_token == "abc" - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.JobTemplate) for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_templates_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - "__call__", - new_callable=mock.AsyncMock, - ) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token="abc", - ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", - ), - services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_templates(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ["abc", "def", "ghi", ""]): - assert page_.raw_page.next_page_token == token - - -@pytest.mark.parametrize("request_type", [services.GetJobTemplateRequest, dict,]) -def test_get_job_template(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate(name="name_value",) - response = client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == "name_value" - - -def test_get_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. 
request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - client.get_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_get_job_template_async( - transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate(name="name_value",) - ) - response = await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == "name_value" - - -@pytest.mark.asyncio -async def test_get_job_template_async_from_dict(): - await test_get_job_template_async(request_type=dict) - - -def test_get_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - call.return_value = resources.JobTemplate() - client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_get_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate() - ) - await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_get_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_template(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_get_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_template( - services.GetJobTemplateRequest(), name="name_value", - ) - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate() - ) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_template(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job_template( - services.GetJobTemplateRequest(), name="name_value", - ) - - -@pytest.mark.parametrize("request_type", [services.DeleteJobTemplateRequest, dict,]) -def test_delete_job_template(request_type, transport: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - client.delete_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_delete_job_template_async( - transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest -): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_template_async_from_dict(): - await test_delete_job_template_async(request_type=dict) - - -def test_delete_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - call.return_value = None - client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -@pytest.mark.asyncio -async def test_delete_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = "name/value" - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] - - -def test_delete_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.delete_job_template(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -def test_delete_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_template( - services.DeleteJobTemplateRequest(), name="name_value", - ) - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), "__call__" - ) as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_template(name="name_value",) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = "name_value" - assert arg == mock_val - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_template( - services.DeleteJobTemplateRequest(), name="name_value", - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranscoderServiceClient(client_options=options, transport=transport,) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options=options, credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TranscoderServiceClient(transport=transport) - assert client.transport is transport - - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.TranscoderServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport,) - - -def test_transcoder_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json", - ) - - -def test_transcoder_service_base_transport(): - # Instantiate the base transport. 
- with mock.patch( - "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport.__init__" - ) as Transport: - Transport.return_value = None - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. - methods = ( - "create_job", - "list_jobs", - "get_job", - "delete_job", - "create_job_template", - "list_job_templates", - "get_job_template", - "delete_job_template", - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - -def test_transcoder_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch( - "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", - ) - load_creds.assert_called_once_with( - "credentials.json", - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -def test_transcoder_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch( - "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages" - ) as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport() - adc.assert_called_once() - - -def test_transcoder_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TranscoderServiceGrpcTransport, grpc_helpers), - (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async), - ], -) -def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=("https://www.googleapis.com/auth/cloud-platform",), - scopes=["1", "2"], - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_grpc_transport_client_cert_source_for_mtls(transport_class): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds, - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback, - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, private_key=expected_key - ) - - -def test_transcoder_service_host_no_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="transcoder.googleapis.com" - ), - ) - assert client.transport._host == "transcoder.googleapis.com:443" - - -def test_transcoder_service_host_with_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions( - api_endpoint="transcoder.googleapis.com:8000" - ), - ) - assert client.transport._host == "transcoder.googleapis.com:8000" - - -def test_transcoder_service_grpc_transport_channel(): - channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TranscoderServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_transcoder_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.TranscoderServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_transport_channel_mtls_with_client_cert_source( - transport_class, -): - with mock.patch( - "grpc.ssl_channel_credentials", autospec=True - ) as grpc_ssl_channel_cred: - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, "default") as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, 
client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_transport_channel_mtls_with_adc(transport_class): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object( - transport_class, "create_channel" - ) as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format( - project=project, location=location, job=job, - ) - actual = TranscoderServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = TranscoderServiceClient.job_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_job_path(path) - assert expected == actual - - -def test_job_template_path(): - project = "cuttlefish" - location = "mussel" - job_template = "winkle" - expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format( - project=project, location=location, job_template=job_template, - ) - actual = TranscoderServiceClient.job_template_path(project, location, job_template) - assert expected == actual - - -def test_parse_job_template_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "job_template": "abalone", - } - path = TranscoderServiceClient.job_template_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_job_template_path(path) - assert expected == actual - - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format( - billing_account=billing_account, - ) - actual = TranscoderServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = TranscoderServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_billing_account_path(path) - assert expected == actual - - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) - actual = TranscoderServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = TranscoderServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_folder_path(path) - assert expected == actual - - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) - actual = TranscoderServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = TranscoderServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_organization_path(path) - assert expected == actual - - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project,) - actual = TranscoderServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = TranscoderServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_project_path(path) - assert expected == actual - - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format( - project=project, location=location, - ) - actual = TranscoderServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = TranscoderServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object( - transports.TranscoderServiceTransport, "_prep_wrapped_messages" - ) as prep: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object( - transports.TranscoderServiceTransport, "_prep_wrapped_messages" - ) as prep: - transport_class = TranscoderServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", - ) - with mock.patch.object( - type(getattr(client.transport, "grpc_channel")), "close" - ) as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - with mock.patch.object( - type(getattr(client.transport, close_name)), "close" - ) as close: - with client: - close.assert_not_called() - close.assert_called_once() - - -def test_client_ctx(): - transports = [ - "grpc", - ] - for transport in transports: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport - ) - # Test client calls underlying transport. 
- with mock.patch.object(type(client.transport), "close") as close: - close.assert_not_called() - with client: - pass - close.assert_called() - - -@pytest.mark.parametrize( - "client_class,transport_class", - [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport), - ( - TranscoderServiceAsyncClient, - transports.TranscoderServiceGrpcAsyncIOTransport, - ), - ], -) -def test_api_key_credentials(client_class, transport_class): - with mock.patch.object( - google.auth._default, "get_api_key_credentials", create=True - ) as get_api_key_credentials: - mock_cred = mock.Mock() - get_api_key_credentials.return_value = mock_cred - options = client_options.ClientOptions() - options.api_key = "api_key" - with mock.patch.object(transport_class, "__init__") as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=mock_cred, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) From 7fbc61917562c269439828df82b474700c95ea23 Mon Sep 17 00:00:00 2001 From: Nicholas Cook Date: Thu, 24 Feb 2022 13:46:48 -0800 Subject: [PATCH 19/20] docs(samples): update samples to use mapping_ attribute of AudioStream (#142) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * fix: revise samples with fix from #138 * 🦉 Updates from OwlBot post-processor See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md Co-authored-by: Owl Bot --- .../create_job_with_embedded_captions.py | 31 +++++-------------- .../create_job_with_standalone_captions.py | 31 +++++-------------- 2 files changed, 14 insertions(+), 48 deletions(-) diff --git a/samples/snippets/create_job_with_embedded_captions.py b/samples/snippets/create_job_with_embedded_captions.py index a81e668..c28a066 100644 
--- a/samples/snippets/create_job_with_embedded_captions.py +++ b/samples/snippets/create_job_with_embedded_captions.py @@ -81,31 +81,14 @@ def create_job_with_embedded_captions( ), transcoder_v1.types.ElementaryStream( key="cea-stream0", - # The following doesn't work because "mapping" is a reserved - # argument name in GCP python client libraries (see - # https://github.com/googleapis/proto-plus-python/blob/main/proto/message.py#L447): - # - # text_stream=transcoder_v1.types.TextStream( - # codec="cea608", - # mapping=[ - # transcoder_v1.types.TextStream.TextMapping( - # atom_key="atom0", - # input_key="caption-input0", - # input_track=0, - # ), - # ], - # ), - # Use a python dictionary as a workaround: - text_stream={ - "codec": "cea608", - "mapping": [ - { - "atom_key": "atom0", - "input_key": "caption-input0", - "input_track": 0, - } + text_stream=transcoder_v1.types.TextStream( + codec="cea608", + mapping_=[ + transcoder_v1.types.TextStream.TextMapping( + atom_key="atom0", input_key="caption-input0", input_track=0, + ), ], - }, + ), ), ], mux_streams=[ diff --git a/samples/snippets/create_job_with_standalone_captions.py b/samples/snippets/create_job_with_standalone_captions.py index de32f49..49ccfc8 100644 --- a/samples/snippets/create_job_with_standalone_captions.py +++ b/samples/snippets/create_job_with_standalone_captions.py @@ -82,31 +82,14 @@ def create_job_with_standalone_captions( ), transcoder_v1.types.ElementaryStream( key="vtt-stream0", - # The following doesn't work because "mapping" is a reserved - # argument name in GCP python client libraries (see - # https://github.com/googleapis/proto-plus-python/blob/main/proto/message.py#L447): - # - # text_stream=transcoder_v1.types.TextStream( - # codec="webvtt", - # mapping=[ - # transcoder_v1.types.TextStream.TextMapping( - # atom_key="atom0", - # input_key="caption-input0", - # input_track=0, - # ), - # ], - # ), - # Use a python dictionary as a workaround: - text_stream={ - "codec": "webvtt", - 
"mapping": [ - { - "atom_key": "atom0", - "input_key": "caption-input0", - "input_track": 0, - } + text_stream=transcoder_v1.types.TextStream( + codec="webvtt", + mapping_=[ + transcoder_v1.types.TextStream.TextMapping( + atom_key="atom0", input_key="caption-input0", input_track=0, + ), ], - }, + ), ), ], mux_streams=[ From 3402ef8b098c8e12bf964bdee0d36b5e49d27de0 Mon Sep 17 00:00:00 2001 From: "release-please[bot]" <55107282+release-please[bot]@users.noreply.github.com> Date: Thu, 24 Feb 2022 17:05:40 -0500 Subject: [PATCH 20/20] chore(main): release 1.3.0 (#136) * chore(main): release 1.3.0 * prune changelog Co-authored-by: release-please[bot] <55107282+release-please[bot]@users.noreply.github.com> Co-authored-by: Anthonios Partheniou --- CHANGELOG.md | 23 +++++++++++++++++++++++ setup.py | 2 +- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eb3e903..376a660 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,28 @@ # Changelog +## [1.3.0](https://github.com/googleapis/python-video-transcoder/compare/v1.2.1...v1.3.0) (2022-02-24) + + +### Features + +* add api key support ([#127](https://github.com/googleapis/python-video-transcoder/issues/127)) ([ac6a403](https://github.com/googleapis/python-video-transcoder/commit/ac6a4031ac66a721d776c41885830023023f14f2)), closes [#140](https://github.com/googleapis/python-video-transcoder/issues/140) + + +### Bug Fixes + +* **deps:** require proto-plus >= 1.20.3 ([ac6a403](https://github.com/googleapis/python-video-transcoder/commit/ac6a4031ac66a721d776c41885830023023f14f2)) +* Remove deprecated v1beta1 API that is no longer available ([#138](https://github.com/googleapis/python-video-transcoder/issues/138)) ([e8a85da](https://github.com/googleapis/python-video-transcoder/commit/e8a85da130a0b688167a1474c339c66af1c6760c)) +* rename mapping attribute of AudioStream to mapping_ 
([c14695b](https://github.com/googleapis/python-video-transcoder/commit/c14695bc9070ec64a890c8f81af382165f5d04ea)) +* resolve DuplicateCredentialArgs error when using credentials_file ([6774bd3](https://github.com/googleapis/python-video-transcoder/commit/6774bd328f235894caf7343088c25cc2809d8932)) +* resolve issue where mapping attribute of AudioStream could not be set ([c14695b](https://github.com/googleapis/python-video-transcoder/commit/c14695bc9070ec64a890c8f81af382165f5d04ea)) + + +### Documentation + +* add generated snippets ([e8a85da](https://github.com/googleapis/python-video-transcoder/commit/e8a85da130a0b688167a1474c339c66af1c6760c)) +* **samples:** update samples to use mapping_ attribute of AudioStream ([#142](https://github.com/googleapis/python-video-transcoder/issues/142)) ([7fbc619](https://github.com/googleapis/python-video-transcoder/commit/7fbc61917562c269439828df82b474700c95ea23)) +* **samples:** add samples and tests for adding captions to a job ([#131](https://github.com/googleapis/python-video-transcoder/issues/131)) ([e30431f](https://github.com/googleapis/python-video-transcoder/commit/e30431fec7c15666afbb5bc975f7077389aac06d)) + ### [1.2.1](https://www.github.com/googleapis/python-video-transcoder/compare/v1.2.0...v1.2.1) (2021-11-04) diff --git a/setup.py b/setup.py index d638073..a3653ee 100644 --- a/setup.py +++ b/setup.py @@ -19,7 +19,7 @@ import os import setuptools # type: ignore -version = "1.2.1" +version = "1.3.0" package_root = os.path.abspath(os.path.dirname(__file__))