From bd73ca6f146df0353d409408c359cc5708d2a824 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 6 Aug 2021 21:47:03 +0000 Subject: [PATCH 1/2] feat: Add video cropping feature feat: Add video padding feature feat: Add ttl_after_completion_days field to Job docs: Update proto documentation docs: Indicate v1beta1 deprecation PiperOrigin-RevId: 389250478 Source-Link: https://github.com/googleapis/googleapis/commit/9a7d19079b5e3c22a5a08eaa94273f5d1eebb317 Source-Link: https://github.com/googleapis/googleapis-gen/commit/c27e567bdcb376f4f12235cfa8b205458cdf5274 --- owl-bot-staging/v1/.coveragerc | 17 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + owl-bot-staging/v1/docs/conf.py | 376 ++ owl-bot-staging/v1/docs/index.rst | 7 + .../v1/docs/transcoder_v1/services.rst | 6 + .../docs/transcoder_v1/transcoder_service.rst | 10 + .../v1/docs/transcoder_v1/types.rst | 7 + .../google/cloud/video/transcoder/__init__.py | 81 + .../v1/google/cloud/video/transcoder/py.typed | 2 + .../cloud/video/transcoder_v1/__init__.py | 82 + .../video/transcoder_v1/gapic_metadata.json | 103 + .../google/cloud/video/transcoder_v1/py.typed | 2 + .../video/transcoder_v1/services/__init__.py | 15 + .../services/transcoder_service/__init__.py | 22 + .../transcoder_service/async_client.py | 810 ++++ .../services/transcoder_service/client.py | 1010 +++++ .../services/transcoder_service/pagers.py | 263 ++ .../transcoder_service/transports/__init__.py | 33 + .../transcoder_service/transports/base.py | 268 ++ .../transcoder_service/transports/grpc.py | 442 +++ .../transports/grpc_asyncio.py | 446 +++ .../video/transcoder_v1/types/__init__.py | 80 + .../video/transcoder_v1/types/resources.py | 1942 ++++++++++ .../video/transcoder_v1/types/services.py | 298 ++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 132 + .../scripts/fixup_transcoder_v1_keywords.py | 183 + owl-bot-staging/v1/setup.py | 54 + owl-bot-staging/v1/tests/__init__.py | 16 + 
owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/transcoder_v1/__init__.py | 16 + .../transcoder_v1/test_transcoder_service.py | 3237 ++++++++++++++++ owl-bot-staging/v1beta1/.coveragerc | 17 + owl-bot-staging/v1beta1/MANIFEST.in | 2 + owl-bot-staging/v1beta1/README.rst | 49 + owl-bot-staging/v1beta1/docs/conf.py | 376 ++ owl-bot-staging/v1beta1/docs/index.rst | 7 + .../docs/transcoder_v1beta1/services.rst | 6 + .../transcoder_v1beta1/transcoder_service.rst | 10 + .../v1beta1/docs/transcoder_v1beta1/types.rst | 7 + .../google/cloud/video/transcoder/__init__.py | 85 + .../google/cloud/video/transcoder/py.typed | 2 + .../video/transcoder_v1beta1/__init__.py | 86 + .../transcoder_v1beta1/gapic_metadata.json | 103 + .../cloud/video/transcoder_v1beta1/py.typed | 2 + .../transcoder_v1beta1/services/__init__.py | 15 + .../services/transcoder_service/__init__.py | 22 + .../transcoder_service/async_client.py | 809 ++++ .../services/transcoder_service/client.py | 1009 +++++ .../services/transcoder_service/pagers.py | 263 ++ .../transcoder_service/transports/__init__.py | 33 + .../transcoder_service/transports/base.py | 268 ++ .../transcoder_service/transports/grpc.py | 442 +++ .../transports/grpc_asyncio.py | 446 +++ .../transcoder_v1beta1/types/__init__.py | 84 + .../transcoder_v1beta1/types/resources.py | 1736 +++++++++ .../transcoder_v1beta1/types/services.py | 256 ++ owl-bot-staging/v1beta1/mypy.ini | 3 + owl-bot-staging/v1beta1/noxfile.py | 132 + .../fixup_transcoder_v1beta1_keywords.py | 183 + owl-bot-staging/v1beta1/setup.py | 54 + owl-bot-staging/v1beta1/tests/__init__.py | 16 + .../v1beta1/tests/unit/__init__.py | 16 + .../v1beta1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/transcoder_v1beta1/__init__.py | 16 + .../test_transcoder_service.py | 3243 +++++++++++++++++ 68 files changed, 19860 insertions(+) create mode 100644 owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/MANIFEST.in 
create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/types.rst create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py create mode 100644 
owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py create mode 100644 owl-bot-staging/v1beta1/.coveragerc create mode 100644 owl-bot-staging/v1beta1/MANIFEST.in create mode 100644 owl-bot-staging/v1beta1/README.rst create mode 100644 owl-bot-staging/v1beta1/docs/conf.py create mode 100644 owl-bot-staging/v1beta1/docs/index.rst create mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst create mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst create mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py create mode 100644 
owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py create mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py create mode 100644 owl-bot-staging/v1beta1/mypy.ini create mode 100644 owl-bot-staging/v1beta1/noxfile.py create mode 100644 owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py create mode 100644 owl-bot-staging/v1beta1/setup.py create mode 100644 owl-bot-staging/v1beta1/tests/__init__.py create mode 100644 owl-bot-staging/v1beta1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py create mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 0000000..6926d19 --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/video/transcoder/__init__.py +exclude_lines = + # 
Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. + except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 0000000..da1cb61 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/video/transcoder *.py +recursive-include google/cloud/video/transcoder_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 0000000..43621a1 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Video Transcoder API +=================================================== + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Video Transcoder API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +..
_`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 0000000..d739f6d --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-video-transcoder documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here.
+needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGELOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-video-transcoder" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases.
+language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+html_theme_options = { + "description": "Google Cloud Video Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. 
+# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-video-transcoder-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. 
+ # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-video-transcoder.tex", + u"google-cloud-video-transcoder Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-video-transcoder", + u"Google Cloud Video Transcoder Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. 
List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-video-transcoder", + u"google-cloud-video-transcoder Documentation", + author, + "google-cloud-video-transcoder", + "GAPIC library for Google Cloud Video Transcoder API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 0000000..0cfe564 --- /dev/null +++ 
b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + transcoder_v1/services + transcoder_v1/types diff --git a/owl-bot-staging/v1/docs/transcoder_v1/services.rst b/owl-bot-staging/v1/docs/transcoder_v1/services.rst new file mode 100644 index 0000000..1bd129e --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Video Transcoder v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + transcoder_service diff --git a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst new file mode 100644 index 0000000..5bf6bd8 --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst @@ -0,0 +1,10 @@ +TranscoderService +----------------------------------- + +.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service + :members: + :inherited-members: + +.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/transcoder_v1/types.rst b/owl-bot-staging/v1/docs/transcoder_v1/types.rst new file mode 100644 index 0000000..7dc3c71 --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Video Transcoder v1 API +============================================== + +.. 
automodule:: google.cloud.video.transcoder_v1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py new file mode 100644 index 0000000..c203235 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py @@ -0,0 +1,81 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.video.transcoder_v1.services.transcoder_service.client import TranscoderServiceClient +from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import TranscoderServiceAsyncClient + +from google.cloud.video.transcoder_v1.types.resources import AdBreak +from google.cloud.video.transcoder_v1.types.resources import AudioStream +from google.cloud.video.transcoder_v1.types.resources import EditAtom +from google.cloud.video.transcoder_v1.types.resources import ElementaryStream +from google.cloud.video.transcoder_v1.types.resources import Encryption +from google.cloud.video.transcoder_v1.types.resources import Input +from google.cloud.video.transcoder_v1.types.resources import Job +from google.cloud.video.transcoder_v1.types.resources import JobConfig +from google.cloud.video.transcoder_v1.types.resources import JobTemplate +from google.cloud.video.transcoder_v1.types.resources import Manifest +from google.cloud.video.transcoder_v1.types.resources import 
MuxStream +from google.cloud.video.transcoder_v1.types.resources import Output +from google.cloud.video.transcoder_v1.types.resources import Overlay +from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig +from google.cloud.video.transcoder_v1.types.resources import PubsubDestination +from google.cloud.video.transcoder_v1.types.resources import SegmentSettings +from google.cloud.video.transcoder_v1.types.resources import SpriteSheet +from google.cloud.video.transcoder_v1.types.resources import TextStream +from google.cloud.video.transcoder_v1.types.resources import VideoStream +from google.cloud.video.transcoder_v1.types.services import CreateJobRequest +from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest +from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import GetJobRequest +from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import ListJobsRequest +from google.cloud.video.transcoder_v1.types.services import ListJobsResponse +from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest +from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse + +__all__ = ('TranscoderServiceClient', + 'TranscoderServiceAsyncClient', + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Encryption', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 
'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed new file mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py new file mode 100644 index 0000000..d9d6f49 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py @@ -0,0 +1,82 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + +from .services.transcoder_service import TranscoderServiceClient +from .services.transcoder_service import TranscoderServiceAsyncClient + +from .types.resources import AdBreak +from .types.resources import AudioStream +from .types.resources import EditAtom +from .types.resources import ElementaryStream +from .types.resources import Encryption +from .types.resources import Input +from .types.resources import Job +from .types.resources import JobConfig +from .types.resources import JobTemplate +from .types.resources import Manifest +from .types.resources import MuxStream +from .types.resources import Output +from .types.resources import Overlay +from .types.resources import PreprocessingConfig +from .types.resources import PubsubDestination +from .types.resources import SegmentSettings +from .types.resources import SpriteSheet +from .types.resources import TextStream +from .types.resources import VideoStream +from .types.services import CreateJobRequest +from .types.services import CreateJobTemplateRequest +from .types.services import DeleteJobRequest +from .types.services import DeleteJobTemplateRequest +from .types.services import GetJobRequest +from .types.services import GetJobTemplateRequest +from .types.services import ListJobsRequest +from .types.services import ListJobsResponse +from .types.services import ListJobTemplatesRequest +from .types.services import ListJobTemplatesResponse + +__all__ = ( + 'TranscoderServiceAsyncClient', +'AdBreak', +'AudioStream', +'CreateJobRequest', +'CreateJobTemplateRequest', +'DeleteJobRequest', +'DeleteJobTemplateRequest', +'EditAtom', +'ElementaryStream', +'Encryption', +'GetJobRequest', +'GetJobTemplateRequest', +'Input', +'Job', +'JobConfig', +'JobTemplate', +'ListJobTemplatesRequest', +'ListJobTemplatesResponse', +'ListJobsRequest', +'ListJobsResponse', +'Manifest', +'MuxStream', +'Output', +'Overlay', +'PreprocessingConfig', +'PubsubDestination', +'SegmentSettings', +'SpriteSheet', +'TextStream', 
+'TranscoderServiceClient', +'VideoStream', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json new file mode 100644 index 0000000..6651379 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.video.transcoder_v1", + "protoPackage": "google.cloud.video.transcoder.v1", + "schema": "1.0", + "services": { + "TranscoderService": { + "clients": { + "grpc": { + "libraryClient": "TranscoderServiceClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TranscoderServiceAsyncClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed new file 
mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py new file mode 100644 index 0000000..1688786 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TranscoderServiceClient +from .async_client import TranscoderServiceAsyncClient + +__all__ = ( + 'TranscoderServiceClient', + 'TranscoderServiceAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py new file mode 100644 index 0000000..65d7b8e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -0,0 +1,810 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport +from .client import TranscoderServiceClient + + +class TranscoderServiceAsyncClient: + """Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. 
+ """ + + _client: TranscoderServiceClient + + DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT + + job_path = staticmethod(TranscoderServiceClient.job_path) + parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) + job_template_path = staticmethod(TranscoderServiceClient.job_template_path) + parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) + common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) + common_project_path = staticmethod(TranscoderServiceClient.common_project_path) + parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) + common_location_path = staticmethod(TranscoderServiceClient.common_location_path) + parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. 
+ """ + return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. + """ + return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transcoder service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TranscoderServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TranscoderServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_job(self, + request: services.CreateJobRequest = None, + *, + parent: str = None, + job: resources.Job = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.CreateJobRequest`): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (:class:`str`): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job (:class:`google.cloud.video.transcoder_v1.types.Job`): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_jobs(self, + request: services.ListJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.ListJobsRequest`): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (:class:`str`): + Required. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: services.GetJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.GetJobRequest`): + The request object. Request message for + `TranscoderService.GetJob`. + name (:class:`str`): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job(self, + request: services.DeleteJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobRequest`): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (:class:`str`): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_template(self, + request: services.CreateJobTemplateRequest = None, + *, + parent: str = None, + job_template: resources.JobTemplate = None, + job_template_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest`): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (:class:`str`): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job_template (:class:`google.cloud.video.transcoder_v1.types.JobTemplate`): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (:class:`str`): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_templates(self, + request: services.ListJobTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesAsyncPager: + r"""Lists job templates in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest`): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (:class:`str`): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: + Response message for TranscoderService.ListJobTemplates. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_templates, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_template(self, + request: services.GetJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.GetJobTemplateRequest`): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (:class:`str`): + Required. The name of the job template to retrieve. 
+ Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_job_template(self, + request: services.DeleteJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + Args: + request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest`): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (:class:`str`): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-video-transcoder", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "TranscoderServiceAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py new file mode 100644 index 0000000..3f4470b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -0,0 +1,1010 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import TranscoderServiceGrpcTransport +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +class TranscoderServiceClientMeta(type): + """Metaclass for the TranscoderService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] + _transport_registry["grpc"] = TranscoderServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[TranscoderServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta): + """Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "transcoder.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client + instance. 
+ """ + return self._transport + + @staticmethod + def job_path(project: str,location: str,job: str,) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str,str]: + """Parses a job path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_template_path(project: str,location: str,job_template: str,) -> str: + """Returns a fully-qualified job_template string.""" + return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) + + @staticmethod + def parse_job_template_path(path: str) -> Dict[str,str]: + """Parses a job_template path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + 
"""Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse a organization path into its component segments.""" + m = re.match(r"^organizations/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) + return m.groupdict() if m else {} + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TranscoderServiceTransport, None] = None, + client_options: Optional[client_options_lib.ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transcoder service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TranscoderServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. 
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TranscoderServiceTransport): + # transport is a TranscoderServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def create_job(self, + request: services.CreateJobRequest = None, + *, + parent: str = None, + job: resources.Job = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + Args: + request (google.cloud.video.transcoder_v1.types.CreateJobRequest): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.video.transcoder_v1.types.Job): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.CreateJobRequest): + request = services.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_jobs(self, + request: services.ListJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs in the specified region. + + Args: + request (google.cloud.video.transcoder_v1.types.ListJobsRequest): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (str): + Required. 
Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobsRequest): + request = services.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: services.GetJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + Args: + request (google.cloud.video.transcoder_v1.types.GetJobRequest): + The request object. Request message for + `TranscoderService.GetJob`. + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, services.GetJobRequest): + request = services.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job(self, + request: services.DeleteJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + Args: + request (google.cloud.video.transcoder_v1.types.DeleteJobRequest): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobRequest): + request = services.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_template(self, + request: services.CreateJobTemplateRequest = None, + *, + parent: str = None, + job_template: resources.JobTemplate = None, + job_template_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + Args: + request (google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (str): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job_template (google.cloud.video.transcoder_v1.types.JobTemplate): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (str): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.CreateJobTemplateRequest): + request = services.CreateJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_templates(self, + request: services.ListJobTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesPager: + r"""Lists job templates in the specified region. + + Args: + request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager: + Response message for TranscoderService.ListJobTemplates. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobTemplatesRequest): + request = services.ListJobTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_job_template(self, + request: services.GetJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + Args: + request (google.cloud.video.transcoder_v1.types.GetJobTemplateRequest): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (str): + Required. The name of the job template to retrieve. + Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.GetJobTemplateRequest): + request = services.GetJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job_template(self, + request: services.DeleteJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + Args: + request (google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (str): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobTemplateRequest): + request = services.DeleteJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-video-transcoder", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "TranscoderServiceClient", +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py new file mode 100644 index 0000000..0987d76 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobsResponse], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[resources.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobsResponse]], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[resources.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobTemplatesResponse], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[resources.JobTemplate]: + for page in self.pages: + yield from page.job_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesAsyncPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[resources.JobTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.job_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py new file mode 100644 index 0000000..5ed2b9a --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 
2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TranscoderServiceTransport +from .grpc import TranscoderServiceGrpcTransport +from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] +_transport_registry['grpc'] = TranscoderServiceGrpcTransport +_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport + +__all__ = ( + 'TranscoderServiceTransport', + 'TranscoderServiceGrpcTransport', + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py new file mode 100644 index 0000000..68e3d92 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py @@ -0,0 +1,268 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-video-transcoder', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class TranscoderServiceTransport(abc.ABC): + """Abstract transport class for TranscoderService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'transcoder.googleapis.com' + def __init__( + self, *, + 
host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_job_template: gapic_v1.method.wrap_method( + self.create_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.list_job_templates: gapic_v1.method.wrap_method( + self.list_job_templates, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job_template: gapic_v1.method.wrap_method( + self.get_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job_template: gapic_v1.method.wrap_method( + self.delete_job_template, + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Union[ + services.ListJobsResponse, + Awaitable[services.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def 
list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Union[ + services.ListJobTemplatesResponse, + Awaitable[services.ListJobTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'TranscoderServiceTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py new file mode 100644 index 0000000..a40fae1 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO + + +class TranscoderServiceGrpcTransport(TranscoderServiceTransport): + """gRPC backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """
+ _stubs: Dict[str, Callable]
+
+ def __init__(self, *,
+ host: str = 'transcoder.googleapis.com',
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: str = None,
+ scopes: Sequence[str] = None,
+ channel: grpc.Channel = None,
+ api_mtls_endpoint: str = None,
+ client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
+ client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
+ quota_project_id: Optional[str] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ always_use_jwt_access: Optional[bool] = False,
+ ) -> None:
+ """Instantiate the transport.
+
+ Args:
+ host (Optional[str]):
+ The hostname to connect to.
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ This argument is ignored if ``channel`` is provided.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is ignored if ``channel`` is provided.
+ scopes (Optional(Sequence[str])): A list of scopes. This argument is
+ ignored if ``channel`` is provided.
+ channel (Optional[grpc.Channel]): A ``Channel`` instance through
+ which to make calls.
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ grpc.Channel: A gRPC channel object.
+
+ Raises:
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+
+ return grpc_helpers.create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ quota_project_id=quota_project_id,
+ default_scopes=cls.AUTH_SCOPES,
+ scopes=scopes,
+ default_host=cls.DEFAULT_HOST,
+ **kwargs
+ )
+
+ @property
+ def grpc_channel(self) -> grpc.Channel:
+ """Return the channel designed to connect to this service.
+ """
+ return self._grpc_channel
+
+ @property
+ def create_job(self) -> Callable[
+ [services.CreateJobRequest],
+ resources.Job]:
+ r"""Return a callable for the create job method over gRPC.
+
+ Creates a job in the specified region.
+
+ Returns:
+ Callable[[~.CreateJobRequest],
+ ~.Job]:
+ A function that, when called, will call the underlying RPC
+ on the server.
+ """
+ # Generate a "stub function" on-the-fly which will actually make
+ # the request.
+ # gRPC handles serialization and deserialization, so we just need
+ # to pass in the functions for each.
+ if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + services.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + resources.Job]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + services.ListJobTemplatesResponse]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + ~.ListJobTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + +__all__ = ( + 'TranscoderServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..7eef79b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import TranscoderServiceGrpcTransport + + +class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): + """gRPC AsyncIO backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+
+ It sends protocol buffers over the wire using gRPC (which is built on
+ top of HTTP/2); the ``grpcio`` package must be installed.
+ """
+
+ _grpc_channel: aio.Channel
+ _stubs: Dict[str, Callable] = {}
+
+ @classmethod
+ def create_channel(cls,
+ host: str = 'transcoder.googleapis.com',
+ credentials: ga_credentials.Credentials = None,
+ credentials_file: Optional[str] = None,
+ scopes: Optional[Sequence[str]] = None,
+ quota_project_id: Optional[str] = None,
+ **kwargs) -> aio.Channel:
+ """Create and return a gRPC AsyncIO channel object.
+ Args:
+ host (Optional[str]): The host for the channel to use.
+ credentials (Optional[~.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify this application to the service. If
+ none are specified, the client will attempt to ascertain
+ the credentials from the environment.
+ credentials_file (Optional[str]): A file with credentials that can
+ be loaded with :func:`google.auth.load_credentials_from_file`.
+ This argument is mutually exclusive with credentials.
+ scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+ service. These are only used when credentials are not specified and
+ are passed to :func:`google.auth.default`.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ kwargs (Optional[dict]): Keyword arguments, which are passed to the
+ channel creation.
+ Returns:
+ aio.Channel: A gRPC AsyncIO channel object.
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create
+ a mutual TLS channel with client SSL credentials from
+ ``client_cert_source`` or application default SSL credentials.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for grpc channel. It is ignored if ``channel`` is provided.
+ client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ A callback to provide client certificate bytes and private key bytes,
+ both in PEM format. It is used to configure mutual TLS channel. It is
+ ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
+ quota_project_id (Optional[str]): An optional project to use for billing
+ and quota.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
+ your own client library.
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should
+ be used for service account credentials.
+
+ Raises:
+ google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
+ creation failed for any reason.
+ google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+ and ``credentials_file`` are passed.
+ """
+ self._grpc_channel = None
+ self._ssl_channel_credentials = ssl_channel_credentials
+ self._stubs: Dict[str, Callable] = {}
+
+ if api_mtls_endpoint:
+ warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
+ if client_cert_source:
+ warnings.warn("client_cert_source is deprecated", DeprecationWarning)
+
+ if channel:
+ # Ignore credentials if a channel was passed.
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the create job method over gRPC. 
+ + Creates a job in the specified region. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Awaitable[services.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Awaitable[services.ListJobTemplatesResponse]]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + Awaitable[~.ListJobTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + +__all__ = ( + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py new file mode 100644 index 0000000..51231fa --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py @@ -0,0 +1,80 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + AdBreak, + AudioStream, + EditAtom, + ElementaryStream, + Encryption, + Input, + Job, + JobConfig, + JobTemplate, + Manifest, + MuxStream, + Output, + Overlay, + PreprocessingConfig, + PubsubDestination, + SegmentSettings, + SpriteSheet, + TextStream, + VideoStream, +) +from .services import ( + CreateJobRequest, + CreateJobTemplateRequest, + DeleteJobRequest, + DeleteJobTemplateRequest, + GetJobRequest, + GetJobTemplateRequest, + ListJobsRequest, + ListJobsResponse, + ListJobTemplatesRequest, + ListJobTemplatesResponse, +) + +__all__ = ( + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Encryption', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py new file mode 100644 index 0000000..a3130a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py @@ -0,0 +1,1942 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may 
not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1', + manifest={ + 'Job', + 'JobTemplate', + 'JobConfig', + 'Input', + 'Output', + 'EditAtom', + 'AdBreak', + 'ElementaryStream', + 'MuxStream', + 'Manifest', + 'PubsubDestination', + 'SpriteSheet', + 'Overlay', + 'PreprocessingConfig', + 'VideoStream', + 'AudioStream', + 'TextStream', + 'SegmentSettings', + 'Encryption', + }, +) + + +class Job(proto.Message): + r"""Transcoding job resource. + Attributes: + name (str): + The resource name of the job. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + input_uri (str): + Input only. Specify the ``input_uri`` to populate empty + ``uri`` fields in each element of ``Job.config.inputs`` or + ``JobTemplate.config.inputs`` when using template. URI of + the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). + output_uri (str): + Input only. Specify the ``output_uri`` to populate an empty + ``Job.config.output.uri`` or + ``JobTemplate.config.output.uri`` when using template. URI + for the output file(s). For example, + ``gs://my-bucket/outputs/``. + template_id (str): + Input only. Specify the ``template_id`` to use for + populating ``Job.config``. The default is ``preset/web-hd``. 
+ + Preset Transcoder templates: + + - ``preset/{preset_id}`` + + - User defined JobTemplate: ``{job_template_id}`` + config (google.cloud.video.transcoder_v1.types.JobConfig): + The configuration for this job. + state (google.cloud.video.transcoder_v1.types.Job.ProcessingState): + Output only. The current state of the job. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + finished. + ttl_after_completion_days (int): + Job time to live value in days, which will be + effective after job completion. Job should be + deleted automatically after the given TTL. Enter + a value between 1 and 90. The default is 30. + error (google.rpc.status_pb2.Status): + Output only. An error object that describes the reason for + the failure. This property is always present when ``state`` + is ``FAILED``. 
+ """ + class ProcessingState(proto.Enum): + r"""The current state of the job.""" + PROCESSING_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + SUCCEEDED = 3 + FAILED = 4 + + name = proto.Field( + proto.STRING, + number=1, + ) + input_uri = proto.Field( + proto.STRING, + number=2, + ) + output_uri = proto.Field( + proto.STRING, + number=3, + ) + template_id = proto.Field( + proto.STRING, + number=4, + oneof='job_config', + ) + config = proto.Field( + proto.MESSAGE, + number=5, + oneof='job_config', + message='JobConfig', + ) + state = proto.Field( + proto.ENUM, + number=8, + enum=ProcessingState, + ) + create_time = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + ttl_after_completion_days = proto.Field( + proto.INT32, + number=15, + ) + error = proto.Field( + proto.MESSAGE, + number=17, + message=status_pb2.Status, + ) + + +class JobTemplate(proto.Message): + r"""Transcoding job template resource. + Attributes: + name (str): + The resource name of the job template. Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + config (google.cloud.video.transcoder_v1.types.JobConfig): + The configuration for this template. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + config = proto.Field( + proto.MESSAGE, + number=2, + message='JobConfig', + ) + + +class JobConfig(proto.Message): + r"""Job configuration + Attributes: + inputs (Sequence[google.cloud.video.transcoder_v1.types.Input]): + List of input assets stored in Cloud Storage. + edit_list (Sequence[google.cloud.video.transcoder_v1.types.EditAtom]): + List of ``Edit atom``\ s. Defines the ultimate timeline of + the resulting file or manifest. 
+ elementary_streams (Sequence[google.cloud.video.transcoder_v1.types.ElementaryStream]): + List of elementary streams. + mux_streams (Sequence[google.cloud.video.transcoder_v1.types.MuxStream]): + List of multiplexing settings for output + streams. + manifests (Sequence[google.cloud.video.transcoder_v1.types.Manifest]): + List of output manifests. + output (google.cloud.video.transcoder_v1.types.Output): + Output configuration. + ad_breaks (Sequence[google.cloud.video.transcoder_v1.types.AdBreak]): + List of ad breaks. Specifies where to insert + ad break tags in the output manifests. + pubsub_destination (google.cloud.video.transcoder_v1.types.PubsubDestination): + Destination on Pub/Sub. + sprite_sheets (Sequence[google.cloud.video.transcoder_v1.types.SpriteSheet]): + List of output sprite sheets. + overlays (Sequence[google.cloud.video.transcoder_v1.types.Overlay]): + List of overlays on the output video, in + descending Z-order. + """ + + inputs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Input', + ) + edit_list = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='EditAtom', + ) + elementary_streams = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ElementaryStream', + ) + mux_streams = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='MuxStream', + ) + manifests = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='Manifest', + ) + output = proto.Field( + proto.MESSAGE, + number=6, + message='Output', + ) + ad_breaks = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='AdBreak', + ) + pubsub_destination = proto.Field( + proto.MESSAGE, + number=8, + message='PubsubDestination', + ) + sprite_sheets = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='SpriteSheet', + ) + overlays = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='Overlay', + ) + + +class Input(proto.Message): + r"""Input asset. + Attributes: + key (str): + A unique key for this input. 
Must be + specified when using advanced mapping and edit + lists. + uri (str): + URI of the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). If empty, the value will + be populated from ``Job.input_uri``. + preprocessing_config (google.cloud.video.transcoder_v1.types.PreprocessingConfig): + Preprocessing configurations. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + uri = proto.Field( + proto.STRING, + number=2, + ) + preprocessing_config = proto.Field( + proto.MESSAGE, + number=3, + message='PreprocessingConfig', + ) + + +class Output(proto.Message): + r"""Location of output file(s) in a Cloud Storage bucket. + Attributes: + uri (str): + URI for the output file(s). For example, + ``gs://my-bucket/outputs/``. If empty the value is populated + from ``Job.output_uri``. + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + + +class EditAtom(proto.Message): + r"""Edit atom. + Attributes: + key (str): + A unique key for this atom. Must be specified + when using advanced mapping. + inputs (Sequence[str]): + List of ``Input.key``\ s identifying files that should be + used in this atom. The listed ``inputs`` must have the same + timeline. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds for the atom, relative to the input file + timeline. When ``end_time_offset`` is not specified, the + ``inputs`` are used until the end of the atom. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the atom, relative to the input + file timeline. The default is ``0s``. 
+ """ + + key = proto.Field( + proto.STRING, + number=1, + ) + inputs = proto.RepeatedField( + proto.STRING, + number=2, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class AdBreak(proto.Message): + r"""Ad break. + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the ad break, relative to the + output file timeline. The default is ``0s``. + """ + + start_time_offset = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + +class ElementaryStream(proto.Message): + r"""Encoding of an input file such as an audio, video, or text + track. Elementary streams must be packaged before + mapping and sharing between different output formats. + + Attributes: + key (str): + A unique key for this elementary stream. + video_stream (google.cloud.video.transcoder_v1.types.VideoStream): + Encoding of a video stream. + audio_stream (google.cloud.video.transcoder_v1.types.AudioStream): + Encoding of an audio stream. + text_stream (google.cloud.video.transcoder_v1.types.TextStream): + Encoding of a text stream. For example, + closed captions or subtitles. + """ + + key = proto.Field( + proto.STRING, + number=4, + ) + video_stream = proto.Field( + proto.MESSAGE, + number=1, + oneof='elementary_stream', + message='VideoStream', + ) + audio_stream = proto.Field( + proto.MESSAGE, + number=2, + oneof='elementary_stream', + message='AudioStream', + ) + text_stream = proto.Field( + proto.MESSAGE, + number=3, + oneof='elementary_stream', + message='TextStream', + ) + + +class MuxStream(proto.Message): + r"""Multiplexing settings for output stream. + Attributes: + key (str): + A unique key for this multiplexed stream. HLS media + manifests will be named ``MuxStream.key`` with the + ``".m3u8"`` extension suffix. 
+ file_name (str): + The name of the generated file. The default is + ``MuxStream.key`` with the extension suffix corresponding to + the ``MuxStream.container``. + + Individual segments also have an incremental 10-digit + zero-padded suffix starting from 0 before the extension, + such as ``"mux_stream0000000123.ts"``. + container (str): + The container format. The default is ``"mp4"`` + + Supported container formats: + + - 'ts' + - 'fmp4'- the corresponding file extension is ``".m4s"`` + - 'mp4' + - 'vtt' + elementary_streams (Sequence[str]): + List of ``ElementaryStream.key``\ s multiplexed in this + stream. + segment_settings (google.cloud.video.transcoder_v1.types.SegmentSettings): + Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. + encryption (google.cloud.video.transcoder_v1.types.Encryption): + Encryption settings. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + file_name = proto.Field( + proto.STRING, + number=2, + ) + container = proto.Field( + proto.STRING, + number=3, + ) + elementary_streams = proto.RepeatedField( + proto.STRING, + number=4, + ) + segment_settings = proto.Field( + proto.MESSAGE, + number=5, + message='SegmentSettings', + ) + encryption = proto.Field( + proto.MESSAGE, + number=6, + message='Encryption', + ) + + +class Manifest(proto.Message): + r"""Manifest configuration. + Attributes: + file_name (str): + The name of the generated file. The default is + ``"manifest"`` with the extension suffix corresponding to + the ``Manifest.type``. + type_ (google.cloud.video.transcoder_v1.types.Manifest.ManifestType): + Required. Type of the manifest, can be "HLS" + or "DASH". + mux_streams (Sequence[str]): + Required. List of user given ``MuxStream.key``\ s that + should appear in this manifest. + + When ``Manifest.type`` is ``HLS``, a media manifest with + name ``MuxStream.key`` and ``.m3u8`` extension is generated + for each element of the ``Manifest.mux_streams``. 
+ """ + class ManifestType(proto.Enum): + r"""The manifest type can be either ``"HLS"`` or ``"DASH"``.""" + MANIFEST_TYPE_UNSPECIFIED = 0 + HLS = 1 + DASH = 2 + + file_name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum=ManifestType, + ) + mux_streams = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class PubsubDestination(proto.Message): + r"""A Pub/Sub destination. + Attributes: + topic (str): + The name of the Pub/Sub topic to publish job completion + notification to. For example: + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + + +class SpriteSheet(proto.Message): + r"""Sprite sheet configuration. + Attributes: + format_ (str): + Format type. The default is ``"jpeg"``. + + Supported formats: + + - 'jpeg' + file_prefix (str): + Required. File name prefix for the generated sprite sheets. + + Each sprite sheet has an incremental 10-digit zero-padded + suffix starting from 0 before the extension, such as + ``"sprite_sheet0000000123.jpeg"``. + sprite_width_pixels (int): + Required. The width of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] + field or the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] + field, but not both (the API will automatically calculate + the missing field). + sprite_height_pixels (int): + Required. The height of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] + field or the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] + field, but not both (the API will automatically calculate + the missing field). 
+ column_count (int): + The maximum number of sprites per row in a + sprite sheet. The default is 0, which indicates + no maximum limit. + row_count (int): + The maximum number of rows per sprite sheet. + When the sprite sheet is full, a new sprite + sheet is created. The default is 0, which + indicates no maximum limit. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds, relative to the output file timeline. + Determines the first sprite to pick. The default is ``0s``. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds, relative to the output file timeline. + When ``end_time_offset`` is not specified, the sprites are + generated until the end of the output file. + total_count (int): + Total number of sprites. Create the specified + number of sprites distributed evenly across the + timeline of the output media. The default is + 100. + interval (google.protobuf.duration_pb2.Duration): + Starting from ``0s``, create sprites at regular intervals. + Specify the interval value in seconds. + quality (int): + The quality of the generated sprite sheet. + Enter a value between 1 and 100, where 1 is the + lowest quality and 100 is the highest quality. + The default is 100. A high quality value + corresponds to a low image data compression + ratio. 
+ """ + + format_ = proto.Field( + proto.STRING, + number=1, + ) + file_prefix = proto.Field( + proto.STRING, + number=2, + ) + sprite_width_pixels = proto.Field( + proto.INT32, + number=3, + ) + sprite_height_pixels = proto.Field( + proto.INT32, + number=4, + ) + column_count = proto.Field( + proto.INT32, + number=5, + ) + row_count = proto.Field( + proto.INT32, + number=6, + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + total_count = proto.Field( + proto.INT32, + number=9, + oneof='extraction_strategy', + ) + interval = proto.Field( + proto.MESSAGE, + number=10, + oneof='extraction_strategy', + message=duration_pb2.Duration, + ) + quality = proto.Field( + proto.INT32, + number=11, + ) + + +class Overlay(proto.Message): + r"""Overlay configuration. + Attributes: + image (google.cloud.video.transcoder_v1.types.Overlay.Image): + Image overlay. + animations (Sequence[google.cloud.video.transcoder_v1.types.Overlay.Animation]): + List of Animations. The list should be + chronological, without any time overlap. + """ + class FadeType(proto.Enum): + r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" + FADE_TYPE_UNSPECIFIED = 0 + FADE_IN = 1 + FADE_OUT = 2 + + class NormalizedCoordinate(proto.Message): + r"""2D normalized coordinates. Default: ``{0.0, 0.0}`` + Attributes: + x (float): + Normalized x coordinate. + y (float): + Normalized y coordinate. + """ + + x = proto.Field( + proto.DOUBLE, + number=1, + ) + y = proto.Field( + proto.DOUBLE, + number=2, + ) + + class Image(proto.Message): + r"""Overlaid jpeg image. + Attributes: + uri (str): + Required. URI of the JPEG image in Cloud Storage. For + example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only + supported image type. 
+ resolution (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized image resolution, based on output video + resolution. Valid values: ``0.0``–``1.0``. To respect the + original image aspect ratio, set either ``x`` or ``y`` to + ``0.0``. To use the original image resolution, set both + ``x`` and ``y`` to ``0.0``. + alpha (float): + Target image opacity. Valid values are from ``1.0`` (solid, + default) to ``0.0`` (transparent), exclusive. Set this to a + value greater than ``0.0``. + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + resolution = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + alpha = proto.Field( + proto.DOUBLE, + number=3, + ) + + class AnimationStatic(proto.Message): + r"""Display static overlay object. + Attributes: + xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start displaying the overlay + object, in seconds. Default: 0 + """ + + xy = proto.Field( + proto.MESSAGE, + number=1, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class AnimationFade(proto.Message): + r"""Display overlay object with fade animation. + Attributes: + fade_type (google.cloud.video.transcoder_v1.types.Overlay.FadeType): + Required. Type of fade animation: ``FADE_IN`` or + ``FADE_OUT``. + xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. 
``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start the fade animation, in + seconds. Default: 0 + end_time_offset (google.protobuf.duration_pb2.Duration): + The time to end the fade animation, in seconds. Default: + ``start_time_offset`` + 1s + """ + + fade_type = proto.Field( + proto.ENUM, + number=1, + enum='Overlay.FadeType', + ) + xy = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + class AnimationEnd(proto.Message): + r"""End previous overlay animation from the video. Without + AnimationEnd, the overlay object will keep the state of previous + animation until the end of the video. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to end overlay object, in seconds. + Default: 0 + """ + + start_time_offset = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class Animation(proto.Message): + r"""Animation types. + Attributes: + animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic): + Display static overlay object. + animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade): + Display overlay object with fade animation. + animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd): + End previous animation. 
+ """ + + animation_static = proto.Field( + proto.MESSAGE, + number=1, + oneof='animation_type', + message='Overlay.AnimationStatic', + ) + animation_fade = proto.Field( + proto.MESSAGE, + number=2, + oneof='animation_type', + message='Overlay.AnimationFade', + ) + animation_end = proto.Field( + proto.MESSAGE, + number=3, + oneof='animation_type', + message='Overlay.AnimationEnd', + ) + + image = proto.Field( + proto.MESSAGE, + number=1, + message=Image, + ) + animations = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Animation, + ) + + +class PreprocessingConfig(proto.Message): + r"""Preprocessing configurations. + Attributes: + color (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Color): + Color preprocessing configuration. + denoise (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Denoise): + Denoise preprocessing configuration. + deblock (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deblock): + Deblock preprocessing configuration. + audio (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Audio): + Audio preprocessing configuration. + crop (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Crop): + Specify the video cropping configuration. + pad (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Pad): + Specify the video pad filter configuration. + """ + + class Color(proto.Message): + r"""Color preprocessing configuration. + Attributes: + saturation (float): + Control color saturation of the video. Enter + a value between -1 and 1, where -1 is fully + desaturated and 1 is maximum saturation. 0 is no + change. The default is 0. + contrast (float): + Control black and white contrast of the + video. Enter a value between -1 and 1, where -1 + is minimum contrast and 1 is maximum contrast. 0 + is no change. The default is 0. + brightness (float): + Control brightness of the video. Enter a + value between -1 and 1, where -1 is minimum + brightness and 1 is maximum brightness. 
0 is no + change. The default is 0. + """ + + saturation = proto.Field( + proto.DOUBLE, + number=1, + ) + contrast = proto.Field( + proto.DOUBLE, + number=2, + ) + brightness = proto.Field( + proto.DOUBLE, + number=3, + ) + + class Denoise(proto.Message): + r"""Denoise preprocessing configuration. + Attributes: + strength (float): + Set strength of the denoise. Enter a value + between 0 and 1. The higher the value, the + smoother the image. 0 is no denoising. The + default is 0. + tune (str): + Set the denoiser mode. The default is ``"standard"``. + + Supported denoiser modes: + + - 'standard' + - 'grain' + """ + + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + tune = proto.Field( + proto.STRING, + number=2, + ) + + class Deblock(proto.Message): + r"""Deblock preprocessing configuration. + Attributes: + strength (float): + Set strength of the deblocker. Enter a value + between 0 and 1. The higher the value, the + stronger the block removal. 0 is no deblocking. + The default is 0. + enabled (bool): + Enable deblocker. The default is ``false``. + """ + + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + enabled = proto.Field( + proto.BOOL, + number=2, + ) + + class Audio(proto.Message): + r"""Audio preprocessing configuration. + Attributes: + lufs (float): + Specify audio loudness normalization in loudness units + relative to full scale (LUFS). Enter a value between -24 and + 0 (the default), where: + + - -24 is the Advanced Television Systems Committee (ATSC + A/85) standard + - -23 is the EU R128 broadcast standard + - -19 is the prior standard for online mono audio + - -18 is the ReplayGain standard + - -16 is the prior standard for stereo audio + - -14 is the new online audio standard recommended by + Spotify, as well as Amazon Echo + - 0 disables normalization + high_boost (bool): + Enable boosting high frequency components. The default is + ``false``. + low_boost (bool): + Enable boosting low frequency components. The default is + ``false``. 
+ """ + + lufs = proto.Field( + proto.DOUBLE, + number=1, + ) + high_boost = proto.Field( + proto.BOOL, + number=2, + ) + low_boost = proto.Field( + proto.BOOL, + number=3, + ) + + class Crop(proto.Message): + r"""Video cropping configuration for the input video. The cropped + input video is scaled to match the output resolution. + + Attributes: + top_pixels (int): + The number of pixels to crop from the top. + The default is 0. + bottom_pixels (int): + The number of pixels to crop from the bottom. + The default is 0. + left_pixels (int): + The number of pixels to crop from the left. + The default is 0. + right_pixels (int): + The number of pixels to crop from the right. + The default is 0. + """ + + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) + + class Pad(proto.Message): + r"""Pad filter configuration for the input video. The padded + input video is scaled after padding with black to match the + output resolution. + + Attributes: + top_pixels (int): + The number of pixels to add to the top. The + default is 0. + bottom_pixels (int): + The number of pixels to add to the bottom. + The default is 0. + left_pixels (int): + The number of pixels to add to the left. The + default is 0. + right_pixels (int): + The number of pixels to add to the right. The + default is 0. 
+ """ + + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) + + color = proto.Field( + proto.MESSAGE, + number=1, + message=Color, + ) + denoise = proto.Field( + proto.MESSAGE, + number=2, + message=Denoise, + ) + deblock = proto.Field( + proto.MESSAGE, + number=3, + message=Deblock, + ) + audio = proto.Field( + proto.MESSAGE, + number=4, + message=Audio, + ) + crop = proto.Field( + proto.MESSAGE, + number=5, + message=Crop, + ) + pad = proto.Field( + proto.MESSAGE, + number=6, + message=Pad, + ) + + +class VideoStream(proto.Message): + r"""Video stream resource. + Attributes: + h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings): + H264 codec settings. + h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings): + H265 codec settings. + vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings): + VP9 codec settings. + """ + + class H264CodecSettings(proto.Message): + r"""H264 codec settings. + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. 
See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. Must be between 1 and 1,000,000,000. + pixel_format (str): + Pixel format to use. The default is ``"yuv420p"``. + + Supported pixel formats: + + - 'yuv420p' pixel format. + - 'yuv422p' pixel format. + - 'yuv444p' pixel format. + - 'yuv420p10' 10-bit HDR pixel format. + - 'yuv422p10' 10-bit HDR pixel format. + - 'yuv444p10' 10-bit HDR pixel format. + - 'yuv420p12' 12-bit HDR pixel format. + - 'yuv422p12' 12-bit HDR pixel format. + - 'yuv444p12' 12-bit HDR pixel format. + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``"vbr"``. + + Supported rate control modes: + + - 'vbr' - variable bitrate + - 'crf' - constant rate factor + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + allow_open_gop (bool): + Specifies whether an open Group of Pictures (GOP) structure + should be allowed or not. The default is ``false``. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``"3s"``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + enable_two_pass (bool): + Use two-pass encoding strategy to achieve better video + quality. ``VideoStream.rate_control_mode`` must be + ``"vbr"``. The default is ``false``. + vbv_size_bits (int): + Size of the Video Buffering Verifier (VBV) buffer in bits. + Must be greater than zero. The default is equal to + ``VideoStream.bitrate_bps``. + vbv_fullness_bits (int): + Initial fullness of the Video Buffering Verifier (VBV) + buffer in bits. 
Must be greater than zero. The default is + equal to 90% of ``VideoStream.vbv_size_bits``. + entropy_coder (str): + The entropy coder to use. The default is ``"cabac"``. + + Supported entropy coders: + + - 'cavlc' + - 'cabac' + b_pyramid (bool): + Allow B-pyramid for reference frame selection. This may not + be supported on all decoders. The default is ``false``. + b_frame_count (int): + The number of consecutive B-frames. Must be greater than or + equal to zero. Must be less than + ``VideoStream.gop_frame_count`` if set. The default is 0. + aq_strength (float): + Specify the intensity of the adaptive + quantizer (AQ). Must be between 0 and 1, where 0 + disables the quantizer and 1 maximizes the + quantizer. A higher value equals a lower bitrate + but smoother image. The default is 0. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - ``baseline`` + - ``main`` + - ``high`` (default) + + The available options are + `FFmpeg-compatible `__\ {: + class="external" }. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H264CodecSettings`` message. + tune (str): + Enforces the specified codec tune. The available options are + `FFmpeg-compatible `__\ {: + class="external" }. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H264CodecSettings`` message. + preset (str): + Enforces the specified codec preset. The default is + ``veryfast``. The available options are + `FFmpeg-compatible `__\ {: + class="external" }. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H264CodecSettings`` message. 
+ """ + + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=9, + oneof='gop_mode', + ) + gop_duration = proto.Field( + proto.MESSAGE, + number=10, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + enable_two_pass = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits = proto.Field( + proto.INT32, + number=13, + ) + entropy_coder = proto.Field( + proto.STRING, + number=14, + ) + b_pyramid = proto.Field( + proto.BOOL, + number=15, + ) + b_frame_count = proto.Field( + proto.INT32, + number=16, + ) + aq_strength = proto.Field( + proto.DOUBLE, + number=17, + ) + profile = proto.Field( + proto.STRING, + number=18, + ) + tune = proto.Field( + proto.STRING, + number=19, + ) + preset = proto.Field( + proto.STRING, + number=20, + ) + + class H265CodecSettings(proto.Message): + r"""H265 codec settings. + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. 
Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. Must be between 1 and 1,000,000,000. + pixel_format (str): + Pixel format to use. The default is ``"yuv420p"``. + + Supported pixel formats: + + - 'yuv420p' pixel format. + - 'yuv422p' pixel format. + - 'yuv444p' pixel format. + - 'yuv420p10' 10-bit HDR pixel format. + - 'yuv422p10' 10-bit HDR pixel format. + - 'yuv444p10' 10-bit HDR pixel format. + - 'yuv420p12' 12-bit HDR pixel format. + - 'yuv422p12' 12-bit HDR pixel format. + - 'yuv444p12' 12-bit HDR pixel format. + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``"vbr"``. + + Supported rate control modes: + + - 'vbr' - variable bitrate + - 'crf' - constant rate factor + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + allow_open_gop (bool): + Specifies whether an open Group of Pictures (GOP) structure + should be allowed or not. The default is ``false``. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``"3s"``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + enable_two_pass (bool): + Use two-pass encoding strategy to achieve better video + quality. ``VideoStream.rate_control_mode`` must be + ``"vbr"``. The default is ``false``. + vbv_size_bits (int): + Size of the Video Buffering Verifier (VBV) buffer in bits. 
+ Must be greater than zero. The default is equal to + ``VideoStream.bitrate_bps``. + vbv_fullness_bits (int): + Initial fullness of the Video Buffering Verifier (VBV) + buffer in bits. Must be greater than zero. The default is + equal to 90% of ``VideoStream.vbv_size_bits``. + b_pyramid (bool): + Allow B-pyramid for reference frame selection. This may not + be supported on all decoders. The default is ``false``. + b_frame_count (int): + The number of consecutive B-frames. Must be greater than or + equal to zero. Must be less than + ``VideoStream.gop_frame_count`` if set. The default is 0. + aq_strength (float): + Specify the intensity of the adaptive + quantizer (AQ). Must be between 0 and 1, where 0 + disables the quantizer and 1 maximizes the + quantizer. A higher value equals a lower bitrate + but smoother image. The default is 0. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + 8bit profiles + + - ``main`` (default) + - ``main-intra`` + - ``mainstillpicture`` + + 10bit profiles + + - ``main10`` (default) + - ``main10-intra`` + - ``main422-10`` + - ``main422-10-intra`` + - ``main444-10`` + - ``main444-10-intra`` + + 12bit profiles + + - ``main12`` (default) + - ``main12-intra`` + - ``main422-12`` + - ``main422-12-intra`` + - ``main444-12`` + - ``main444-12-intra`` + + The available options are + `FFmpeg-compatible `__\ {: + class="external" }. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H265CodecSettings`` message. + tune (str): + Enforces the specified codec tune. The available options are + `FFmpeg-compatible `__\ {: + class="external" }. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H265CodecSettings`` message. + preset (str): + Enforces the specified codec preset. The default is + ``veryfast``. The available options are + `FFmpeg-compatible `__\ {: + class="external" }. 
Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``H265CodecSettings`` message. + """ + + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=9, + oneof='gop_mode', + ) + gop_duration = proto.Field( + proto.MESSAGE, + number=10, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + enable_two_pass = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits = proto.Field( + proto.INT32, + number=13, + ) + b_pyramid = proto.Field( + proto.BOOL, + number=14, + ) + b_frame_count = proto.Field( + proto.INT32, + number=15, + ) + aq_strength = proto.Field( + proto.DOUBLE, + number=16, + ) + profile = proto.Field( + proto.STRING, + number=17, + ) + tune = proto.Field( + proto.STRING, + number=18, + ) + preset = proto.Field( + proto.STRING, + number=19, + ) + + class Vp9CodecSettings(proto.Message): + r"""VP9 codec settings. + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + frame_rate (float): + Required. 
The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. Must be between 1 and 1,000,000,000. + pixel_format (str): + Pixel format to use. The default is ``"yuv420p"``. + + Supported pixel formats: + + - 'yuv420p' pixel format. + - 'yuv422p' pixel format. + - 'yuv444p' pixel format. + - 'yuv420p10' 10-bit HDR pixel format. + - 'yuv422p10' 10-bit HDR pixel format. + - 'yuv444p10' 10-bit HDR pixel format. + - 'yuv420p12' 12-bit HDR pixel format. + - 'yuv422p12' 12-bit HDR pixel format. + - 'yuv444p12' 12-bit HDR pixel format. + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``"vbr"``. + + Supported rate control modes: + + - 'vbr' - variable bitrate + - 'crf' - constant rate factor + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``"3s"``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - ``profile0`` (default) + - ``profile1`` + - ``profile2`` + - ``profile3`` + + The available options are + `WebM-compatible `__\ {: + class="external" }. 
Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``Vp9CodecSettings`` message. + """ + + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=8, + oneof='gop_mode', + ) + gop_duration = proto.Field( + proto.MESSAGE, + number=9, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + profile = proto.Field( + proto.STRING, + number=10, + ) + + h264 = proto.Field( + proto.MESSAGE, + number=1, + oneof='codec_settings', + message=H264CodecSettings, + ) + h265 = proto.Field( + proto.MESSAGE, + number=2, + oneof='codec_settings', + message=H265CodecSettings, + ) + vp9 = proto.Field( + proto.MESSAGE, + number=3, + oneof='codec_settings', + message=Vp9CodecSettings, + ) + + +class AudioStream(proto.Message): + r"""Audio stream resource. + Attributes: + codec (str): + The codec for this audio stream. The default is ``"aac"``. + + Supported audio codecs: + + - 'aac' + - 'aac-he' + - 'aac-he-v2' + - 'mp3' + - 'ac3' + - 'eac3' + bitrate_bps (int): + Required. Audio bitrate in bits per second. + Must be between 1 and 10,000,000. + channel_count (int): + Number of audio channels. Must be between 1 + and 6. The default is 2. + channel_layout (Sequence[str]): + A list of channel names specifying layout of the audio + channels. This only affects the metadata embedded in the + container headers, if supported by the specified format. The + default is ``["fl", "fr"]``. 
+ + Supported channel names: + + - 'fl' - Front left channel + - 'fr' - Front right channel + - 'sl' - Side left channel + - 'sr' - Side right channel + - 'fc' - Front center channel + - 'lfe' - Low frequency + mapping (Sequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): + The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + sample_rate_hertz (int): + The audio sample rate in Hertz. The default + is 48000 Hertz. + """ + + class AudioMapping(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + + Attributes: + atom_key (str): + Required. The ``EditAtom.key`` that references the atom with + audio inputs in the ``Job.edit_list``. + input_key (str): + Required. The ``Input.key`` that identifies the input file. + input_track (int): + Required. The zero-based index of the track + in the input file. + input_channel (int): + Required. The zero-based index of the channel + in the input audio stream. + output_channel (int): + Required. The zero-based index of the channel + in the output audio stream. + gain_db (float): + Audio volume control in dB. Negative values + decrease volume, positive values increase. The + default is 0. 
+ """ + + atom_key = proto.Field( + proto.STRING, + number=1, + ) + input_key = proto.Field( + proto.STRING, + number=2, + ) + input_track = proto.Field( + proto.INT32, + number=3, + ) + input_channel = proto.Field( + proto.INT32, + number=4, + ) + output_channel = proto.Field( + proto.INT32, + number=5, + ) + gain_db = proto.Field( + proto.DOUBLE, + number=6, + ) + + codec = proto.Field( + proto.STRING, + number=1, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=2, + ) + channel_count = proto.Field( + proto.INT32, + number=3, + ) + channel_layout = proto.RepeatedField( + proto.STRING, + number=4, + ) + mapping = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=AudioMapping, + ) + sample_rate_hertz = proto.Field( + proto.INT32, + number=6, + ) + + +class TextStream(proto.Message): + r"""Encoding of a text stream. For example, closed captions or + subtitles. + + Attributes: + codec (str): + The codec for this text stream. The default is ``"webvtt"``. + + Supported text codecs: + + - 'srt' + - 'ttml' + - 'cea608' + - 'cea708' + - 'webvtt' + mapping (Sequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): + The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + """ + + class TextMapping(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + + Attributes: + atom_key (str): + Required. The ``EditAtom.key`` that references atom with + text inputs in the ``Job.edit_list``. + input_key (str): + Required. The ``Input.key`` that identifies the input file. + input_track (int): + Required. The zero-based index of the track + in the input file. 
+ """ + + atom_key = proto.Field( + proto.STRING, + number=1, + ) + input_key = proto.Field( + proto.STRING, + number=2, + ) + input_track = proto.Field( + proto.INT32, + number=3, + ) + + codec = proto.Field( + proto.STRING, + number=1, + ) + mapping = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TextMapping, + ) + + +class SegmentSettings(proto.Message): + r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. + Attributes: + segment_duration (google.protobuf.duration_pb2.Duration): + Duration of the segments in seconds. The default is + ``"6.0s"``. Note that ``segmentDuration`` must be greater + than or equal to ```gopDuration`` <#videostream>`__, and + ``segmentDuration`` must be divisible by + ```gopDuration`` <#videostream>`__. + individual_segments (bool): + Required. Create an individual segment file. The default is + ``false``. + """ + + segment_duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + individual_segments = proto.Field( + proto.BOOL, + number=3, + ) + + +class Encryption(proto.Message): + r"""Encryption settings. + Attributes: + key (str): + Required. 128 bit encryption key represented + as lowercase hexadecimal digits. + iv (str): + Required. 128 bit Initialization Vector (IV) + represented as lowercase hexadecimal digits. + aes_128 (google.cloud.video.transcoder_v1.types.Encryption.Aes128Encryption): + Configuration for AES-128 encryption. + sample_aes (google.cloud.video.transcoder_v1.types.Encryption.SampleAesEncryption): + Configuration for SAMPLE-AES encryption. + mpeg_cenc (google.cloud.video.transcoder_v1.types.Encryption.MpegCommonEncryption): + Configuration for MPEG Common Encryption + (MPEG-CENC). + """ + + class Aes128Encryption(proto.Message): + r"""Configuration for AES-128 encryption. + Attributes: + key_uri (str): + Required. URI of the key delivery service. + This URI is inserted into the M3U8 header. 
+ """ + + key_uri = proto.Field( + proto.STRING, + number=1, + ) + + class SampleAesEncryption(proto.Message): + r"""Configuration for SAMPLE-AES encryption. + Attributes: + key_uri (str): + Required. URI of the key delivery service. + This URI is inserted into the M3U8 header. + """ + + key_uri = proto.Field( + proto.STRING, + number=1, + ) + + class MpegCommonEncryption(proto.Message): + r"""Configuration for MPEG Common Encryption (MPEG-CENC). + Attributes: + key_id (str): + Required. 128 bit Key ID represented as + lowercase hexadecimal digits for use with common + encryption. + scheme (str): + Required. Specify the encryption scheme. + Supported encryption schemes: + - 'cenc' + - 'cbcs' + """ + + key_id = proto.Field( + proto.STRING, + number=1, + ) + scheme = proto.Field( + proto.STRING, + number=2, + ) + + key = proto.Field( + proto.STRING, + number=1, + ) + iv = proto.Field( + proto.STRING, + number=2, + ) + aes_128 = proto.Field( + proto.MESSAGE, + number=3, + oneof='encryption_mode', + message=Aes128Encryption, + ) + sample_aes = proto.Field( + proto.MESSAGE, + number=4, + oneof='encryption_mode', + message=SampleAesEncryption, + ) + mpeg_cenc = proto.Field( + proto.MESSAGE, + number=5, + oneof='encryption_mode', + message=MpegCommonEncryption, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py new file mode 100644 index 0000000..3f885d7 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py @@ -0,0 +1,298 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.video.transcoder_v1.types import resources + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1', + manifest={ + 'CreateJobRequest', + 'ListJobsRequest', + 'GetJobRequest', + 'DeleteJobRequest', + 'ListJobsResponse', + 'CreateJobTemplateRequest', + 'ListJobTemplatesRequest', + 'GetJobTemplateRequest', + 'DeleteJobTemplateRequest', + 'ListJobTemplatesResponse', + }, +) + + +class CreateJobRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJob``. + Attributes: + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + job (google.cloud.video.transcoder_v1.types.Job): + Required. Parameters for creating transcoding + job. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Job, + ) + + +class ListJobsRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobs``. The parent + location from which to retrieve the collection of jobs. + + Attributes: + parent (str): + Required. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. + filter (str): + The filter expression, following the syntax + outlined in https://google.aip.dev/160. + order_by (str): + One or more fields to compare and use to sort + the output. 
See + https://google.aip.dev/132#ordering. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) + + +class GetJobRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJob``. + Attributes: + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJob``. + Attributes: + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListJobsResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobs``. + Attributes: + jobs (Sequence[google.cloud.video.transcoder_v1.types.Job]): + List of jobs in the specified region. + next_page_token (str): + The pagination token. + unreachable (Sequence[str]): + List of regions that could not be reached. + """ + + @property + def raw_page(self): + return self + + jobs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Job, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJobTemplate``. + Attributes: + parent (str): + Required. The parent location to create this job template. + Format: ``projects/{project}/locations/{location}`` + job_template (google.cloud.video.transcoder_v1.types.JobTemplate): + Required. Parameters for creating job + template. + job_template_id (str): + Required. 
The ID to use for the job template, which will + become the final component of the job template's resource + name. + + This value should be 4-63 characters, and valid characters + must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + job_template = proto.Field( + proto.MESSAGE, + number=2, + message=resources.JobTemplate, + ) + job_template_id = proto.Field( + proto.STRING, + number=3, + ) + + +class ListJobTemplatesRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobTemplates``. + Attributes: + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. + filter (str): + The filter expression, following the syntax + outlined in https://google.aip.dev/160. + order_by (str): + One or more fields to compare and use to sort + the output. See + https://google.aip.dev/132#ordering. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) + + +class GetJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJobTemplate``. + Attributes: + name (str): + Required. The name of the job template to retrieve. Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJobTemplate``. + Attributes: + name (str): + Required. The name of the job template to delete. 
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListJobTemplatesResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobTemplates``. + Attributes: + job_templates (Sequence[google.cloud.video.transcoder_v1.types.JobTemplate]): + List of job templates in the specified + region. + next_page_token (str): + The pagination token. + unreachable (Sequence[str]): + List of regions that could not be reached. + """ + + @property + def raw_page(self): + return self + + job_templates = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.JobTemplate, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 0000000..4505b48 --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 0000000..5c33374 --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", +] + +@nox.session(python=['3.6', '3.7', '3.8', '3.9']) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/video/transcoder_v1/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python='3.7') +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. 
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py new file mode 100644 index 0000000..912892a --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py @@ -0,0 +1,183 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class transcoderCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_job': ('parent', 'job', ), + 'create_job_template': ('parent', 'job_template', 'job_template_id', ), + 'delete_job': ('name', ), + 'delete_job_template': ('name', ), + 'get_job': ('name', ), + 'get_job_template': ('name', ), + 'list_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_job_templates': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. 
+ return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=transcoderCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. 
+ updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the transcoder client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 
0000000..8f17241 --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-video-transcoder', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud', 'google.cloud.video'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 
0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py new file mode 100644 index 0000000..1e521e0 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -0,0 +1,3237 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceAsyncClient +from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceClient +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.services.transcoder_service import transports +from google.cloud.video.transcoder_v1.services.transcoder_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - 
Delete these pytest markers (Make the "greater than or equal to" tests the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None + assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + TranscoderServiceClient, + TranscoderServiceAsyncClient, +]) +def test_transcoder_service_client_from_service_account_info(client_class): + creds = 
ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'transcoder.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TranscoderServiceGrpcTransport, "grpc"), + (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + TranscoderServiceClient, + TranscoderServiceAsyncClient, +]) +def test_transcoder_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 
'transcoder.googleapis.com:443' + + +def test_transcoder_service_client_get_transport_class(): + transport = TranscoderServiceClient.get_transport_class() + available_transports = [ + transports.TranscoderServiceGrpcTransport, + ] + assert transport in available_transports + + transport = TranscoderServiceClient.get_transport_class("grpc") + assert transport == transports.TranscoderServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert 
are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_transcoder_service_client_client_options_from_dict(): + with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = TranscoderServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_create_job(transport: str = 'grpc', request_type=services.CreateJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +def test_create_job_from_dict(): + test_create_job(request_type=dict) + + +def test_create_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + client.create_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + +@pytest.mark.asyncio +async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + )) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + + +def test_create_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = resources.Job() + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job == resources.Job(name='name_value') + + +def test_create_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job == resources.Job(name='name_value') + + +@pytest.mark.asyncio +async def test_create_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + +def test_list_jobs(transport: str = 'grpc', request_type=services.ListJobsRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_jobs_from_dict(): + test_list_jobs(request_type=dict) + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + +@pytest.mark.asyncio +async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = services.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_jobs_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_jobs_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + + +def test_list_jobs_pager(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_jobs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, resources.Job) + for i in results) + +def test_list_jobs_pages(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Job) + for i in responses) + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_jobs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_job(transport: str = 'grpc', request_type=services.GetJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +def test_get_job_from_dict(): + test_get_job(request_type=dict) + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + )) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = resources.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + services.GetJobRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job( + services.GetJobRequest(), + name='name_value', + ) + + +def test_delete_job(transport: str = 'grpc', request_type=services.DeleteJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +def test_delete_job_from_dict(): + test_delete_job(request_type=dict) + + +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + +@pytest.mark.asyncio +async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. 
+ assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + + +def test_delete_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + + +def test_create_job_template(transport: str = 'grpc', request_type=services.CreateJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate( + name='name_value', + ) + response = client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +def test_create_job_template_from_dict(): + test_create_job_template(request_type=dict) + + +def test_create_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + client.create_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( + name='name_value', + )) + response = await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_job_template_async_from_dict(): + await test_create_job_template_async(request_type=dict) + + +def test_create_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = services.CreateJobTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_template == resources.JobTemplate(name='name_value') + assert args[0].job_template_id == 'job_template_id_value' + + +def test_create_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + +@pytest.mark.asyncio +async def test_create_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_template == resources.JobTemplate(name='name_value') + assert args[0].job_template_id == 'job_template_id_value' + + +@pytest.mark.asyncio +async def test_create_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + +def test_list_job_templates(transport: str = 'grpc', request_type=services.ListJobTemplatesRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_job_templates_from_dict(): + test_list_job_templates(request_type=dict) + + +def test_list_job_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + client.list_job_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + +@pytest.mark.asyncio +async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_job_templates_async_from_dict(): + await test_list_job_templates_async(request_type=dict) + + +def test_list_job_templates_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = services.ListJobTemplatesResponse() + client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_templates_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. 
Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_job_templates_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_job_templates_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_job_templates_pager(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_templates(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in results) + +def test_list_job_templates_pages(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_templates_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in responses) + +@pytest.mark.asyncio +async def test_list_job_templates_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_job_templates(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_job_template(transport: str = 'grpc', request_type=services.GetJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate( + name='name_value', + ) + response = client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +def test_get_job_template_from_dict(): + test_get_job_template(request_type=dict) + + +def test_get_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + client.get_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( + name='name_value', + )) + response = await client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_template_async_from_dict(): + await test_get_job_template_async(request_type=dict) + + +def test_get_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job_template( + services.GetJobTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.get_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job_template( + services.GetJobTemplateRequest(), + name='name_value', + ) + + +def test_delete_job_template(transport: str = 'grpc', request_type=services.DeleteJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_template_from_dict(): + test_delete_job_template(request_type=dict) + + +def test_delete_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + client.delete_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_template_async_from_dict(): + await test_delete_job_template_async(request_type=dict) + + +def test_delete_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = services.DeleteJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = None + client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_template( + services.DeleteJobTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job_template( + services.DeleteJobTemplateRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TranscoderServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TranscoderServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TranscoderServiceGrpcTransport, + ) + +def test_transcoder_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_transcoder_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_job', + 'list_jobs', + 'get_job', + 'delete_job', + 'create_job_template', + 'list_job_templates', + 'get_job_template', + 'delete_job_template', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def test_transcoder_service_base_transport_with_adc(): + # 
Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranscoderServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranscoderServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TranscoderServiceGrpcTransport, grpc_helpers), + (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+ with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): + with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: + transport_class( + credentials=cred, + client_cert_source_for_mtls=client_cert_source_callback + ) + expected_cert, expected_key = client_cert_source_callback() + mock_ssl_cred.assert_called_once_with( + certificate_chain=expected_cert, + private_key=expected_key + ) + + +def test_transcoder_service_host_no_port(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'), + ) + assert client.transport._host == 'transcoder.googleapis.com:443' + + +def test_transcoder_service_host_with_port(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'), + ) + assert client.transport._host == 'transcoder.googleapis.com:8000' + +def test_transcoder_service_grpc_transport_channel(): + channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. + transport = transports.TranscoderServiceGrpcTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +def test_transcoder_service_grpc_asyncio_transport_channel(): + channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) + + # Check that channel is used if provided. 
+ transport = transports.TranscoderServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed 
from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_path(): + project = "squid" + location = "clam" + job = "whelk" + expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + actual = TranscoderServiceClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "job": "nudibranch", + } + path = TranscoderServiceClient.job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_job_path(path) + assert expected == actual + +def test_job_template_path(): + project = "cuttlefish" + location = "mussel" + job_template = "winkle" + expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) + actual = TranscoderServiceClient.job_template_path(project, location, job_template) + assert expected == actual + + +def test_parse_job_template_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "job_template": "abalone", + } + path = TranscoderServiceClient.job_template_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_job_template_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TranscoderServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TranscoderServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TranscoderServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TranscoderServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TranscoderServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TranscoderServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TranscoderServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TranscoderServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TranscoderServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TranscoderServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = TranscoderServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v1beta1/.coveragerc b/owl-bot-staging/v1beta1/.coveragerc new file mode 100644 index 0000000..6926d19 --- /dev/null +++ b/owl-bot-staging/v1beta1/.coveragerc @@ -0,0 +1,17 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/video/transcoder/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ + # Ignore pkg_resources exceptions. + # This is added at the module level as a safeguard for if someone + # generates the code and tries to run it without pip installing. This + # makes it virtually impossible to test properly. 
+ except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1beta1/MANIFEST.in b/owl-bot-staging/v1beta1/MANIFEST.in new file mode 100644 index 0000000..64bd549 --- /dev/null +++ b/owl-bot-staging/v1beta1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/video/transcoder *.py +recursive-include google/cloud/video/transcoder_v1beta1 *.py diff --git a/owl-bot-staging/v1beta1/README.rst b/owl-bot-staging/v1beta1/README.rst new file mode 100644 index 0000000..43621a1 --- /dev/null +++ b/owl-bot-staging/v1beta1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Video Transcoder API +================================================= + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Video Transcoder API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv + source /bin/activate + /bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. 
code-block:: console + + python3 -m venv + \Scripts\activate + \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1beta1/docs/conf.py b/owl-bot-staging/v1beta1/docs/conf.py new file mode 100644 index 0000000..d739f6d --- /dev/null +++ b/owl-bot-staging/v1beta1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-video-transcoder documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "1.6.3" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. 
+extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = "index" + +# General information about the project. +project = u"google-cloud-video-transcoder" +copyright = u"2020, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. 
+# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Video Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. 
If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. +# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-video-transcoder-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. 
List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + master_doc, + "google-cloud-video-transcoder.tex", + u"google-cloud-video-transcoder Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + master_doc, + "google-cloud-video-transcoder", + u"Google Cloud Video Transcoder Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + master_doc, + "google-cloud-video-transcoder", + u"google-cloud-video-transcoder Documentation", + author, + "google-cloud-video-transcoder", + "GAPIC library for Google Cloud Video Transcoder API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. 
+# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta1/docs/index.rst b/owl-bot-staging/v1beta1/docs/index.rst new file mode 100644 index 0000000..b5ddfd3 --- /dev/null +++ b/owl-bot-staging/v1beta1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + transcoder_v1beta1/services + transcoder_v1beta1/types diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst new file mode 100644 index 0000000..a3b6569 --- /dev/null +++ b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Video Transcoder v1beta1 API +====================================================== +.. 
toctree:: + :maxdepth: 2 + + transcoder_service diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst new file mode 100644 index 0000000..c631a53 --- /dev/null +++ b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst @@ -0,0 +1,10 @@ +TranscoderService +----------------------------------- + +.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service + :members: + :inherited-members: + +.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst new file mode 100644 index 0000000..cb38b8a --- /dev/null +++ b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst @@ -0,0 +1,7 @@ +Types for Google Cloud Video Transcoder v1beta1 API +=================================================== + +.. automodule:: google.cloud.video.transcoder_v1beta1.types + :members: + :undoc-members: + :show-inheritance: diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py new file mode 100644 index 0000000..6ceb79f --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py @@ -0,0 +1,85 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# + +from google.cloud.video.transcoder_v1beta1.services.transcoder_service.client import TranscoderServiceClient +from google.cloud.video.transcoder_v1beta1.services.transcoder_service.async_client import TranscoderServiceAsyncClient + +from google.cloud.video.transcoder_v1beta1.types.resources import AdBreak +from google.cloud.video.transcoder_v1beta1.types.resources import AudioStream +from google.cloud.video.transcoder_v1beta1.types.resources import EditAtom +from google.cloud.video.transcoder_v1beta1.types.resources import ElementaryStream +from google.cloud.video.transcoder_v1beta1.types.resources import Encryption +from google.cloud.video.transcoder_v1beta1.types.resources import FailureDetail +from google.cloud.video.transcoder_v1beta1.types.resources import Input +from google.cloud.video.transcoder_v1beta1.types.resources import Job +from google.cloud.video.transcoder_v1beta1.types.resources import JobConfig +from google.cloud.video.transcoder_v1beta1.types.resources import JobTemplate +from google.cloud.video.transcoder_v1beta1.types.resources import Manifest +from google.cloud.video.transcoder_v1beta1.types.resources import MuxStream +from google.cloud.video.transcoder_v1beta1.types.resources import Output +from google.cloud.video.transcoder_v1beta1.types.resources import Overlay +from google.cloud.video.transcoder_v1beta1.types.resources import PreprocessingConfig +from google.cloud.video.transcoder_v1beta1.types.resources import Progress +from google.cloud.video.transcoder_v1beta1.types.resources import PubsubDestination +from google.cloud.video.transcoder_v1beta1.types.resources import SegmentSettings +from google.cloud.video.transcoder_v1beta1.types.resources import SpriteSheet +from google.cloud.video.transcoder_v1beta1.types.resources import TextStream +from google.cloud.video.transcoder_v1beta1.types.resources import VideoStream +from 
google.cloud.video.transcoder_v1beta1.types.services import CreateJobRequest +from google.cloud.video.transcoder_v1beta1.types.services import CreateJobTemplateRequest +from google.cloud.video.transcoder_v1beta1.types.services import DeleteJobRequest +from google.cloud.video.transcoder_v1beta1.types.services import DeleteJobTemplateRequest +from google.cloud.video.transcoder_v1beta1.types.services import GetJobRequest +from google.cloud.video.transcoder_v1beta1.types.services import GetJobTemplateRequest +from google.cloud.video.transcoder_v1beta1.types.services import ListJobsRequest +from google.cloud.video.transcoder_v1beta1.types.services import ListJobsResponse +from google.cloud.video.transcoder_v1beta1.types.services import ListJobTemplatesRequest +from google.cloud.video.transcoder_v1beta1.types.services import ListJobTemplatesResponse + +__all__ = ('TranscoderServiceClient', + 'TranscoderServiceAsyncClient', + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Encryption', + 'FailureDetail', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'Progress', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed new file mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. 
diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py new file mode 100644 index 0000000..85efcb9 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py @@ -0,0 +1,86 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + +from .services.transcoder_service import TranscoderServiceClient +from .services.transcoder_service import TranscoderServiceAsyncClient + +from .types.resources import AdBreak +from .types.resources import AudioStream +from .types.resources import EditAtom +from .types.resources import ElementaryStream +from .types.resources import Encryption +from .types.resources import FailureDetail +from .types.resources import Input +from .types.resources import Job +from .types.resources import JobConfig +from .types.resources import JobTemplate +from .types.resources import Manifest +from .types.resources import MuxStream +from .types.resources import Output +from .types.resources import Overlay +from .types.resources import PreprocessingConfig +from .types.resources import Progress +from .types.resources import PubsubDestination +from .types.resources import SegmentSettings +from .types.resources import SpriteSheet +from .types.resources import TextStream +from .types.resources import VideoStream +from .types.services import CreateJobRequest +from 
.types.services import CreateJobTemplateRequest +from .types.services import DeleteJobRequest +from .types.services import DeleteJobTemplateRequest +from .types.services import GetJobRequest +from .types.services import GetJobTemplateRequest +from .types.services import ListJobsRequest +from .types.services import ListJobsResponse +from .types.services import ListJobTemplatesRequest +from .types.services import ListJobTemplatesResponse + +__all__ = ( + 'TranscoderServiceAsyncClient', +'AdBreak', +'AudioStream', +'CreateJobRequest', +'CreateJobTemplateRequest', +'DeleteJobRequest', +'DeleteJobTemplateRequest', +'EditAtom', +'ElementaryStream', +'Encryption', +'FailureDetail', +'GetJobRequest', +'GetJobTemplateRequest', +'Input', +'Job', +'JobConfig', +'JobTemplate', +'ListJobTemplatesRequest', +'ListJobTemplatesResponse', +'ListJobsRequest', +'ListJobsResponse', +'Manifest', +'MuxStream', +'Output', +'Overlay', +'PreprocessingConfig', +'Progress', +'PubsubDestination', +'SegmentSettings', +'SpriteSheet', +'TextStream', +'TranscoderServiceClient', +'VideoStream', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json new file mode 100644 index 0000000..ebf08b7 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.video.transcoder_v1beta1", + "protoPackage": "google.cloud.video.transcoder.v1beta1", + "schema": "1.0", + "services": { + "TranscoderService": { + "clients": { + "grpc": { + "libraryClient": "TranscoderServiceClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + 
"DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TranscoderServiceAsyncClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed new file mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py new file mode 100644 index 0000000..4de6597 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py new file mode 100644 index 0000000..1688786 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .client import TranscoderServiceClient +from .async_client import TranscoderServiceAsyncClient + +__all__ = ( + 'TranscoderServiceClient', + 'TranscoderServiceAsyncClient', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py new file mode 100644 index 0000000..de9d823 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py @@ -0,0 +1,809 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +import functools +import re +from typing import Dict, Sequence, Tuple, Type, Union +import pkg_resources + +import google.api_core.client_options as ClientOptions # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport +from .client import TranscoderServiceClient + + +class TranscoderServiceAsyncClient: + """Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. 
+ """ + + _client: TranscoderServiceClient + + DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT + + job_path = staticmethod(TranscoderServiceClient.job_path) + parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) + job_template_path = staticmethod(TranscoderServiceClient.job_template_path) + parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) + common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) + common_project_path = staticmethod(TranscoderServiceClient.common_project_path) + parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) + common_location_path = staticmethod(TranscoderServiceClient.common_location_path) + parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. 
+ """ + return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. + """ + return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) + + def __init__(self, *, + credentials: ga_credentials.Credentials = None, + transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", + client_options: ClientOptions = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transcoder service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TranscoderServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. 
It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + """ + self._client = TranscoderServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_job(self, + request: services.CreateJobRequest = None, + *, + parent: str = None, + job: resources.Job = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.CreateJobRequest`): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (:class:`str`): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job (:class:`google.cloud.video.transcoder_v1beta1.types.Job`): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def list_jobs(self, + request: services.ListJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.ListJobsRequest`): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (:class:`str`): + Required. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsAsyncPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: services.GetJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.GetJobRequest`): + The request object. Request message for + `TranscoderService.GetJob`. + name (:class:`str`): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job(self, + request: services.DeleteJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest`): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (:class:`str`): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_template(self, + request: services.CreateJobTemplateRequest = None, + *, + parent: str = None, + job_template: resources.JobTemplate = None, + job_template_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest`): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (:class:`str`): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job_template (:class:`google.cloud.video.transcoder_v1beta1.types.JobTemplate`): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (:class:`str`): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. 
+ rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_templates(self, + request: services.ListJobTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesAsyncPager: + r"""Lists job templates in the specified region. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest`): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (:class:`str`): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: + Response message for TranscoderService.ListJobTemplates. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_templates, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_template(self, + request: services.GetJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest`): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (:class:`str`): + Required. The name of the job template to retrieve. 
+ Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_job_template(self, + request: services.DeleteJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + Args: + request (:class:`google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest`): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (:class:`str`): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-video-transcoder", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "TranscoderServiceAsyncClient", +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py new file mode 100644 index 0000000..a12d3ce --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py @@ -0,0 +1,1009 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from collections import OrderedDict +from distutils import util +import os +import re +from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union +import pkg_resources + +from google.api_core import client_options as client_options_lib # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import TranscoderServiceGrpcTransport +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +class TranscoderServiceClientMeta(type): + """Metaclass for the TranscoderService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] + _transport_registry["grpc"] = TranscoderServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: str = None, + ) -> Type[TranscoderServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
 If none is
+ provided, then the first transport in the registry is used.
+
+ Returns:
+ The transport class to use.
+ """
+ # If a specific transport is requested, return that one.
+ if label:
+ return cls._transport_registry[label]
+
+ # No transport is requested; return the default (that is, the first one
+ # in the dictionary).
+ return next(iter(cls._transport_registry.values()))
+
+
+class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta):
+ """Using the Transcoder API, you can queue asynchronous jobs for
+ transcoding media into various output formats. Output formats
+ may include different streaming standards such as HTTP Live
+ Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
+ You can also customize jobs using advanced features such as
+ Digital Rights Management (DRM), audio equalization, content
+ concatenation, and digital ad-stitch ready content generation.
+ """
+
+ @staticmethod
+ def _get_default_mtls_endpoint(api_endpoint):
+ """Converts api endpoint to mTLS endpoint.
+
+ Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
+ "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
+ Args:
+ api_endpoint (Optional[str]): the api endpoint to convert.
+ Returns:
+ str: converted mTLS api endpoint.
+ """
+ if not api_endpoint:
+ return api_endpoint
+
+ mtls_endpoint_re = re.compile(
+ r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
+ ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "transcoder.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client + instance. 
+ """
+ return self._transport
+
+ @staticmethod
+ def job_path(project: str,location: str,job: str,) -> str:
+ """Returns a fully-qualified job string."""
+ return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, )
+
+ @staticmethod
+ def parse_job_path(path: str) -> Dict[str,str]:
+ """Parses a job path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobs/(?P<job>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def job_template_path(project: str,location: str,job_template: str,) -> str:
+ """Returns a fully-qualified job_template string."""
+ return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, )
+
+ @staticmethod
+ def parse_job_template_path(path: str) -> Dict[str,str]:
+ """Parses a job_template path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobTemplates/(?P<job_template>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_billing_account_path(billing_account: str, ) -> str:
+ """Returns a fully-qualified billing_account string."""
+ return "billingAccounts/{billing_account}".format(billing_account=billing_account, )
+
+ @staticmethod
+ def parse_common_billing_account_path(path: str) -> Dict[str,str]:
+ """Parse a billing_account path into its component segments."""
+ m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_folder_path(folder: str, ) -> str:
+ """Returns a fully-qualified folder string."""
+ return "folders/{folder}".format(folder=folder, )
+
+ @staticmethod
+ def parse_common_folder_path(path: str) -> Dict[str,str]:
+ """Parse a folder path into its component segments."""
+ m = re.match(r"^folders/(?P<folder>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_organization_path(organization: str, ) -> str:
+ 
 """Returns a fully-qualified organization string."""
+ return "organizations/{organization}".format(organization=organization, )
+
+ @staticmethod
+ def parse_common_organization_path(path: str) -> Dict[str,str]:
+ """Parse a organization path into its component segments."""
+ m = re.match(r"^organizations/(?P<organization>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_project_path(project: str, ) -> str:
+ """Returns a fully-qualified project string."""
+ return "projects/{project}".format(project=project, )
+
+ @staticmethod
+ def parse_common_project_path(path: str) -> Dict[str,str]:
+ """Parse a project path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ @staticmethod
+ def common_location_path(project: str, location: str, ) -> str:
+ """Returns a fully-qualified location string."""
+ return "projects/{project}/locations/{location}".format(project=project, location=location, )
+
+ @staticmethod
+ def parse_common_location_path(path: str) -> Dict[str,str]:
+ """Parse a location path into its component segments."""
+ m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
+ return m.groupdict() if m else {}
+
+ def __init__(self, *,
+ credentials: Optional[ga_credentials.Credentials] = None,
+ transport: Union[str, TranscoderServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
+ client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
+ ) -> None:
+ """Instantiates the transcoder service client.
+
+ Args:
+ credentials (Optional[google.auth.credentials.Credentials]): The
+ authorization credentials to attach to requests. These
+ credentials identify the application to the service; if none
+ are specified, the client will attempt to ascertain the
+ credentials from the environment.
+ transport (Union[str, TranscoderServiceTransport]): The
+ transport to use. If set to None, a transport is chosen
+ automatically.
+ client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. It won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + + # Create SSL credentials for mutual TLS if needed. 
+ use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) + + client_cert_source_func = None + is_mtls = False + if use_client_cert: + if client_options.client_cert_source: + is_mtls = True + client_cert_source_func = client_options.client_cert_source + else: + is_mtls = mtls.has_default_client_cert_source() + if is_mtls: + client_cert_source_func = mtls.default_client_cert_source() + else: + client_cert_source_func = None + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + else: + use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_mtls_env == "never": + api_endpoint = self.DEFAULT_ENDPOINT + elif use_mtls_env == "always": + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + elif use_mtls_env == "auto": + if is_mtls: + api_endpoint = self.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = self.DEFAULT_ENDPOINT + else: + raise MutualTLSChannelError( + "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " + "values: never, auto, always" + ) + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TranscoderServiceTransport): + # transport is a TranscoderServiceTransport instance. + if credentials or client_options.credentials_file: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." 
+ ) + self._transport = transport + else: + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=( + Transport == type(self).get_transport_class("grpc") + or Transport == type(self).get_transport_class("grpc_asyncio") + ), + ) + + def create_job(self, + request: services.CreateJobRequest = None, + *, + parent: str = None, + job: resources.Job = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.CreateJobRequest): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.video.transcoder_v1beta1.types.Job): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. 
+ # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.CreateJobRequest): + request = services.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_jobs(self, + request: services.ListJobsRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs in the specified region. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (str): + Required. 
Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobsRequest): + request = services.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: services.GetJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.GetJobRequest): + The request object. Request message for + `TranscoderService.GetJob`. + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, services.GetJobRequest): + request = services.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job(self, + request: services.DeleteJobRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobRequest): + request = services.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_template(self, + request: services.CreateJobTemplateRequest = None, + *, + parent: str = None, + job_template: resources.JobTemplate = None, + job_template_id: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (str): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. 
+ job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (str): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.CreateJobTemplateRequest): + request = services.CreateJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_templates(self, + request: services.ListJobTemplatesRequest = None, + *, + parent: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesPager: + r"""Lists job templates in the specified region. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesPager: + Response message for TranscoderService.ListJobTemplates. 
+ + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobTemplatesRequest): + request = services.ListJobTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def get_job_template(self, + request: services.GetJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (str): + Required. The name of the job template to retrieve. + Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1beta1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.GetJobTemplateRequest): + request = services.GetJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. 
+ if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job_template(self, + request: services.DeleteJobTemplateRequest = None, + *, + name: str = None, + retry: retries.Retry = gapic_v1.method.DEFAULT, + timeout: float = None, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + Args: + request (google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (str): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Sanity check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobTemplateRequest): + request = services.DeleteJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + + + + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + "google-cloud-video-transcoder", + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + + +__all__ = ( + "TranscoderServiceClient", +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py new file mode 100644 index 0000000..63d2ed7 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py @@ -0,0 +1,263 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional + +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. 
+ + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobsResponse], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[resources.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. 
+ + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobsResponse]], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[resources.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobTemplatesResponse], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. 
+ request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterable[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterable[resources.JobTemplate]: + for page in self.pages: + yield from page.job_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesAsyncPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. 
+ """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterable[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + + def __aiter__(self) -> AsyncIterable[resources.JobTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.job_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py new file mode 100644 index 0000000..5ed2b9a --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py @@ -0,0 +1,33 
@@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TranscoderServiceTransport +from .grpc import TranscoderServiceGrpcTransport +from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. +_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] +_transport_registry['grpc'] = TranscoderServiceGrpcTransport +_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport + +__all__ = ( + 'TranscoderServiceTransport', + 'TranscoderServiceGrpcTransport', + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py new file mode 100644 index 0000000..ccf0914 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py @@ -0,0 +1,268 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union +import packaging.version +import pkg_resources + +import google.auth # type: ignore +import google.api_core # type: ignore +from google.api_core import exceptions as core_exceptions # type: ignore +from google.api_core import gapic_v1 # type: ignore +from google.api_core import retry as retries # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.protobuf import empty_pb2 # type: ignore + +try: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( + gapic_version=pkg_resources.get_distribution( + 'google-cloud-video-transcoder', + ).version, + ) +except pkg_resources.DistributionNotFound: + DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() + +try: + # google.auth.__version__ was added in 1.26.0 + _GOOGLE_AUTH_VERSION = google.auth.__version__ +except AttributeError: + try: # try pkg_resources if it is available + _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version + except pkg_resources.DistributionNotFound: # pragma: NO COVER + _GOOGLE_AUTH_VERSION = None + + +class TranscoderServiceTransport(abc.ABC): + """Abstract transport class for TranscoderService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'transcoder.googleapis.com' + def __init__( + 
self, *, + host: str = DEFAULT_HOST, + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. 
+ if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + + # If the credentials is service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # TODO(busunkim): This method is in the base transport + # to avoid duplicating code across the transport classes. These functions + # should be deleted once the minimum required versions of google-auth is increased. + + # TODO: Remove this function once google-auth >= 1.25.0 is required + @classmethod + def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: + """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" + + scopes_kwargs = {} + + if _GOOGLE_AUTH_VERSION and ( + packaging.version.parse(_GOOGLE_AUTH_VERSION) + >= packaging.version.parse("1.25.0") + ): + scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} + else: + scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} + + return scopes_kwargs + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. 
+ self._wrapped_methods = { + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_job_template: gapic_v1.method.wrap_method( + self.create_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.list_job_templates: gapic_v1.method.wrap_method( + self.list_job_templates, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job_template: gapic_v1.method.wrap_method( + self.get_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job_template: gapic_v1.method.wrap_method( + self.delete_job_template, + default_timeout=60.0, + client_info=client_info, + ), + } + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Union[ + services.ListJobsResponse, + Awaitable[services.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def 
list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Union[ + services.ListJobTemplatesResponse, + Awaitable[services.ListJobTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + +__all__ = ( + 'TranscoderServiceTransport', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py new file mode 100644 index 0000000..2621bc3 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py @@ -0,0 +1,442 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers # type: ignore +from google.api_core import gapic_v1 # type: ignore +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO + + +class TranscoderServiceGrpcTransport(TranscoderServiceTransport): + """gRPC backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. 
+ """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Sequence[str] = None, + channel: grpc.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or applicatin default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. 
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: str = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. 
+            credentials_file (Optional[str]): A file with credentials that can
+                be loaded with :func:`google.auth.load_credentials_from_file`.
+                This argument is mutually exclusive with credentials.
+            scopes (Optional[Sequence[str]]): An optional list of scopes needed for this
+                service. These are only used when credentials are not specified and
+                are passed to :func:`google.auth.default`.
+            quota_project_id (Optional[str]): An optional project to use for billing
+                and quota.
+            kwargs (Optional[dict]): Keyword arguments, which are passed to the
+                channel creation.
+        Returns:
+            grpc.Channel: A gRPC channel object.
+
+        Raises:
+            google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
+                and ``credentials_file`` are passed.
+        """
+
+        return grpc_helpers.create_channel(
+            host,
+            credentials=credentials,
+            credentials_file=credentials_file,
+            quota_project_id=quota_project_id,
+            default_scopes=cls.AUTH_SCOPES,
+            scopes=scopes,
+            default_host=cls.DEFAULT_HOST,
+            **kwargs
+        )
+
+    @property
+    def grpc_channel(self) -> grpc.Channel:
+        """Return the channel designed to connect to this service.
+        """
+        return self._grpc_channel
+
+    @property
+    def create_job(self) -> Callable[
+            [services.CreateJobRequest],
+            resources.Job]:
+        r"""Return a callable for the create job method over gRPC.
+
+        Creates a job in the specified region.
+
+        Returns:
+            Callable[[~.CreateJobRequest],
+                    ~.Job]:
+                A function that, when called, will call the underlying RPC
+                on the server.
+        """
+        # Generate a "stub function" on-the-fly which will actually make
+        # the request.
+        # gRPC handles serialization and deserialization, so we just need
+        # to pass in the functions for each.
+ if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + services.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + resources.Job]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + services.ListJobTemplatesResponse]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + ~.ListJobTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + +__all__ = ( + 'TranscoderServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..c91e961 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py @@ -0,0 +1,446 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 # type: ignore +from google.api_core import grpc_helpers_async # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +import packaging.version + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import TranscoderServiceGrpcTransport + + +class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): + """gRPC AsyncIO backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. 
+ + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): An optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. 
+ """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'transcoder.googleapis.com', + credentials: ga_credentials.Credentials = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: aio.Channel = None, + api_mtls_endpoint: str = None, + client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, + ssl_channel_credentials: grpc.ChannelCredentials = None, + client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, + quota_project_id=None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
+ If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. 
+ credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + credentials=self._credentials, + credentials_file=credentials_file, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the create job method over gRPC. 
+ + Creates a job in the specified region. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Awaitable[services.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. 
+ """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Awaitable[services.ListJobTemplatesResponse]]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + Awaitable[~.ListJobTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
+ if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + +__all__ = ( + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py new file mode 100644 index 0000000..ebb04cc --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from .resources import ( + AdBreak, + AudioStream, + EditAtom, + ElementaryStream, + Encryption, + FailureDetail, + Input, + Job, + JobConfig, + JobTemplate, + Manifest, + MuxStream, + Output, + Overlay, + PreprocessingConfig, + Progress, + PubsubDestination, + SegmentSettings, + SpriteSheet, + TextStream, + VideoStream, +) +from .services import ( + CreateJobRequest, + CreateJobTemplateRequest, + DeleteJobRequest, + DeleteJobTemplateRequest, + GetJobRequest, + GetJobTemplateRequest, + ListJobsRequest, + ListJobsResponse, + ListJobTemplatesRequest, + ListJobTemplatesResponse, +) + +__all__ = ( + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Encryption', + 'FailureDetail', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'Progress', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py new file mode 100644 index 0000000..e04fca3 --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py @@ -0,0 +1,1736 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 
Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1beta1', + manifest={ + 'Job', + 'JobTemplate', + 'JobConfig', + 'Input', + 'Output', + 'EditAtom', + 'AdBreak', + 'ElementaryStream', + 'MuxStream', + 'Manifest', + 'PubsubDestination', + 'SpriteSheet', + 'Overlay', + 'PreprocessingConfig', + 'VideoStream', + 'AudioStream', + 'TextStream', + 'SegmentSettings', + 'Encryption', + 'Progress', + 'FailureDetail', + }, +) + + +class Job(proto.Message): + r"""Transcoding job resource. + Attributes: + name (str): + The resource name of the job. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + input_uri (str): + Input only. Specify the ``input_uri`` to populate empty + ``uri`` fields in each element of ``Job.config.inputs`` or + ``JobTemplate.config.inputs`` when using template. URI of + the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). + output_uri (str): + Input only. Specify the ``output_uri`` to populate an empty + ``Job.config.output.uri`` or + ``JobTemplate.config.output.uri`` when using template. URI + for the output file(s). For example, + ``gs://my-bucket/outputs/``. + template_id (str): + Input only. 
Specify the ``template_id`` to use for + populating ``Job.config``. The default is ``preset/web-hd``. + + Preset Transcoder templates: + + - ``preset/{preset_id}`` + + - User defined JobTemplate: ``{job_template_id}`` + config (google.cloud.video.transcoder_v1beta1.types.JobConfig): + The configuration for this job. + priority (int): + Specify the priority of the job. Enter a + value between 0 and 100, where 0 is the lowest + priority and 100 is the highest priority. The + default is 0. + origin_uri (google.cloud.video.transcoder_v1beta1.types.Job.OriginUri): + Output only. The origin URI. + + state (google.cloud.video.transcoder_v1beta1.types.Job.ProcessingState): + Output only. The current state of the job. + progress (google.cloud.video.transcoder_v1beta1.types.Progress): + Output only. Estimated fractional progress, from ``0`` to + ``1`` for each step. + + .. raw:: html + + + failure_reason (str): + Output only. A description of the reason for the failure. + This property is always present when ``state`` is + ``FAILED``. + failure_details (Sequence[google.cloud.video.transcoder_v1beta1.types.FailureDetail]): + Output only. List of failure details. This property may + contain additional information about the failure when + ``failure_reason`` is present. + + .. raw:: html + + + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + finished. + ttl_after_completion_days (int): + Job time to live value in days, which will be + effective after job completion. Job should be + deleted automatically after the given TTL. Enter + a value between 1 and 90. The default is 30. 
+ """ + class ProcessingState(proto.Enum): + r"""The current state of the job.""" + PROCESSING_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + SUCCEEDED = 3 + FAILED = 4 + + class OriginUri(proto.Message): + r"""The origin URI. + Attributes: + hls (str): + HLS manifest URI per + https://tools.ietf.org/html/rfc8216#section-4.3.4. + If multiple HLS manifests are created, only the + first one is listed. + dash (str): + Dash manifest URI. If multiple Dash manifests + are created, only the first one is listed. + """ + + hls = proto.Field( + proto.STRING, + number=1, + ) + dash = proto.Field( + proto.STRING, + number=2, + ) + + name = proto.Field( + proto.STRING, + number=1, + ) + input_uri = proto.Field( + proto.STRING, + number=2, + ) + output_uri = proto.Field( + proto.STRING, + number=3, + ) + template_id = proto.Field( + proto.STRING, + number=4, + oneof='job_config', + ) + config = proto.Field( + proto.MESSAGE, + number=5, + oneof='job_config', + message='JobConfig', + ) + priority = proto.Field( + proto.INT32, + number=6, + ) + origin_uri = proto.Field( + proto.MESSAGE, + number=7, + message=OriginUri, + ) + state = proto.Field( + proto.ENUM, + number=8, + enum=ProcessingState, + ) + progress = proto.Field( + proto.MESSAGE, + number=9, + message='Progress', + ) + failure_reason = proto.Field( + proto.STRING, + number=10, + ) + failure_details = proto.RepeatedField( + proto.MESSAGE, + number=11, + message='FailureDetail', + ) + create_time = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + ttl_after_completion_days = proto.Field( + proto.INT32, + number=15, + ) + + +class JobTemplate(proto.Message): + r"""Transcoding job template resource. + Attributes: + name (str): + The resource name of the job template. 
Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + config (google.cloud.video.transcoder_v1beta1.types.JobConfig): + The configuration for this template. + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + config = proto.Field( + proto.MESSAGE, + number=2, + message='JobConfig', + ) + + +class JobConfig(proto.Message): + r"""Job configuration + Attributes: + inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.Input]): + List of input assets stored in Cloud Storage. + edit_list (Sequence[google.cloud.video.transcoder_v1beta1.types.EditAtom]): + List of ``Edit atom``\ s. Defines the ultimate timeline of + the resulting file or manifest. + elementary_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.ElementaryStream]): + List of elementary streams. + mux_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.MuxStream]): + List of multiplexing settings for output + streams. + manifests (Sequence[google.cloud.video.transcoder_v1beta1.types.Manifest]): + List of output manifests. + output (google.cloud.video.transcoder_v1beta1.types.Output): + Output configuration. + ad_breaks (Sequence[google.cloud.video.transcoder_v1beta1.types.AdBreak]): + List of ad breaks. Specifies where to insert + ad break tags in the output manifests. + pubsub_destination (google.cloud.video.transcoder_v1beta1.types.PubsubDestination): + Destination on Pub/Sub. + sprite_sheets (Sequence[google.cloud.video.transcoder_v1beta1.types.SpriteSheet]): + List of output sprite sheets. + overlays (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay]): + List of overlays on the output video, in + descending Z-order. 
+ """ + + inputs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Input', + ) + edit_list = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='EditAtom', + ) + elementary_streams = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ElementaryStream', + ) + mux_streams = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='MuxStream', + ) + manifests = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='Manifest', + ) + output = proto.Field( + proto.MESSAGE, + number=6, + message='Output', + ) + ad_breaks = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='AdBreak', + ) + pubsub_destination = proto.Field( + proto.MESSAGE, + number=8, + message='PubsubDestination', + ) + sprite_sheets = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='SpriteSheet', + ) + overlays = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='Overlay', + ) + + +class Input(proto.Message): + r"""Input asset. + Attributes: + key (str): + A unique key for this input. Must be + specified when using advanced mapping and edit + lists. + uri (str): + URI of the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). If empty, the value will + be populated from ``Job.input_uri``. + preprocessing_config (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig): + Preprocessing configurations. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + uri = proto.Field( + proto.STRING, + number=2, + ) + preprocessing_config = proto.Field( + proto.MESSAGE, + number=3, + message='PreprocessingConfig', + ) + + +class Output(proto.Message): + r"""Location of output file(s) in a Cloud Storage bucket. + Attributes: + uri (str): + URI for the output file(s). For example, + ``gs://my-bucket/outputs/``. If empty the value is populated + from ``Job.output_uri``. 
+ """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + + +class EditAtom(proto.Message): + r"""Edit atom. + Attributes: + key (str): + A unique key for this atom. Must be specified + when using advanced mapping. + inputs (Sequence[str]): + List of ``Input.key``\ s identifying files that should be + used in this atom. The listed ``inputs`` must have the same + timeline. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds for the atom, relative to the input file + timeline. When ``end_time_offset`` is not specified, the + ``inputs`` are used until the end of the atom. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the atom, relative to the input + file timeline. The default is ``0s``. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + inputs = proto.RepeatedField( + proto.STRING, + number=2, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class AdBreak(proto.Message): + r"""Ad break. + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the ad break, relative to the + output file timeline. The default is ``0s``. + """ + + start_time_offset = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + +class ElementaryStream(proto.Message): + r"""Encoding of an input file such as an audio, video, or text + track. Elementary streams must be packaged before + mapping and sharing between different output formats. + + Attributes: + key (str): + A unique key for this elementary stream. + video_stream (google.cloud.video.transcoder_v1beta1.types.VideoStream): + Encoding of a video stream. + audio_stream (google.cloud.video.transcoder_v1beta1.types.AudioStream): + Encoding of an audio stream. 
+ text_stream (google.cloud.video.transcoder_v1beta1.types.TextStream): + Encoding of a text stream. For example, + closed captions or subtitles. + """ + + key = proto.Field( + proto.STRING, + number=4, + ) + video_stream = proto.Field( + proto.MESSAGE, + number=1, + oneof='elementary_stream', + message='VideoStream', + ) + audio_stream = proto.Field( + proto.MESSAGE, + number=2, + oneof='elementary_stream', + message='AudioStream', + ) + text_stream = proto.Field( + proto.MESSAGE, + number=3, + oneof='elementary_stream', + message='TextStream', + ) + + +class MuxStream(proto.Message): + r"""Multiplexing settings for output stream. + Attributes: + key (str): + A unique key for this multiplexed stream. HLS media + manifests will be named ``MuxStream.key`` with the + ``".m3u8"`` extension suffix. + file_name (str): + The name of the generated file. The default is + ``MuxStream.key`` with the extension suffix corresponding to + the ``MuxStream.container``. + + Individual segments also have an incremental 10-digit + zero-padded suffix starting from 0 before the extension, + such as ``"mux_stream0000000123.ts"``. + container (str): + The container format. The default is ``"mp4"`` + + Supported container formats: + + - 'ts' + - 'fmp4'- the corresponding file extension is ``".m4s"`` + - 'mp4' + - 'vtt' + elementary_streams (Sequence[str]): + List of ``ElementaryStream.key``\ s multiplexed in this + stream. + segment_settings (google.cloud.video.transcoder_v1beta1.types.SegmentSettings): + Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. + encryption (google.cloud.video.transcoder_v1beta1.types.Encryption): + Encryption settings. 
+ """ + + key = proto.Field( + proto.STRING, + number=1, + ) + file_name = proto.Field( + proto.STRING, + number=2, + ) + container = proto.Field( + proto.STRING, + number=3, + ) + elementary_streams = proto.RepeatedField( + proto.STRING, + number=4, + ) + segment_settings = proto.Field( + proto.MESSAGE, + number=5, + message='SegmentSettings', + ) + encryption = proto.Field( + proto.MESSAGE, + number=6, + message='Encryption', + ) + + +class Manifest(proto.Message): + r"""Manifest configuration. + Attributes: + file_name (str): + The name of the generated file. The default is + ``"manifest"`` with the extension suffix corresponding to + the ``Manifest.type``. + type_ (google.cloud.video.transcoder_v1beta1.types.Manifest.ManifestType): + Required. Type of the manifest, can be "HLS" + or "DASH". + mux_streams (Sequence[str]): + Required. List of user given ``MuxStream.key``\ s that + should appear in this manifest. + + When ``Manifest.type`` is ``HLS``, a media manifest with + name ``MuxStream.key`` and ``.m3u8`` extension is generated + for each element of the ``Manifest.mux_streams``. + """ + class ManifestType(proto.Enum): + r"""The manifest type can be either ``"HLS"`` or ``"DASH"``.""" + MANIFEST_TYPE_UNSPECIFIED = 0 + HLS = 1 + DASH = 2 + + file_name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum=ManifestType, + ) + mux_streams = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class PubsubDestination(proto.Message): + r"""A Pub/Sub destination. + Attributes: + topic (str): + The name of the Pub/Sub topic to publish job completion + notification to. For example: + ``projects/{project}/topics/{topic}``. + """ + + topic = proto.Field( + proto.STRING, + number=1, + ) + + +class SpriteSheet(proto.Message): + r"""Sprite sheet configuration. + Attributes: + format_ (str): + Format type. The default is ``"jpeg"``. + + Supported formats: + + - 'jpeg' + file_prefix (str): + Required. 
File name prefix for the generated sprite sheets. + + Each sprite sheet has an incremental 10-digit zero-padded + suffix starting from 0 before the extension, such as + ``"sprite_sheet0000000123.jpeg"``. + sprite_width_pixels (int): + Required. The width of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] + field or the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] + field, but not both (the API will automatically calculate + the missing field). + sprite_height_pixels (int): + Required. The height of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] + field or the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] + field, but not both (the API will automatically calculate + the missing field). + column_count (int): + The maximum number of sprites per row in a + sprite sheet. The default is 0, which indicates + no maximum limit. + row_count (int): + The maximum number of rows per sprite sheet. + When the sprite sheet is full, a new sprite + sheet is created. The default is 0, which + indicates no maximum limit. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds, relative to the output file timeline. + Determines the first sprite to pick. The default is ``0s``. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds, relative to the output file timeline. + When ``end_time_offset`` is not specified, the sprites are + generated until the end of the output file. + total_count (int): + Total number of sprites. 
Create the specified + number of sprites distributed evenly across the + timeline of the output media. The default is + 100. + interval (google.protobuf.duration_pb2.Duration): + Starting from ``0s``, create sprites at regular intervals. + Specify the interval value in seconds. + quality (int): + The quality of the generated sprite sheet. + Enter a value between 1 and 100, where 1 is the + lowest quality and 100 is the highest quality. + The default is 100. A high quality value + corresponds to a low image data compression + ratio. + """ + + format_ = proto.Field( + proto.STRING, + number=1, + ) + file_prefix = proto.Field( + proto.STRING, + number=2, + ) + sprite_width_pixels = proto.Field( + proto.INT32, + number=3, + ) + sprite_height_pixels = proto.Field( + proto.INT32, + number=4, + ) + column_count = proto.Field( + proto.INT32, + number=5, + ) + row_count = proto.Field( + proto.INT32, + number=6, + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + total_count = proto.Field( + proto.INT32, + number=9, + oneof='extraction_strategy', + ) + interval = proto.Field( + proto.MESSAGE, + number=10, + oneof='extraction_strategy', + message=duration_pb2.Duration, + ) + quality = proto.Field( + proto.INT32, + number=11, + ) + + +class Overlay(proto.Message): + r"""Overlay configuration. + Attributes: + image (google.cloud.video.transcoder_v1beta1.types.Overlay.Image): + Image overlay. + animations (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay.Animation]): + List of Animations. The list should be + chronological, without any time overlap. + """ + class FadeType(proto.Enum): + r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" + FADE_TYPE_UNSPECIFIED = 0 + FADE_IN = 1 + FADE_OUT = 2 + + class NormalizedCoordinate(proto.Message): + r"""2D normalized coordinates. 
Default: ``{0.0, 0.0}`` + Attributes: + x (float): + Normalized x coordinate. + y (float): + Normalized y coordinate. + """ + + x = proto.Field( + proto.DOUBLE, + number=1, + ) + y = proto.Field( + proto.DOUBLE, + number=2, + ) + + class Image(proto.Message): + r"""Overlaid jpeg image. + Attributes: + uri (str): + Required. URI of the JPEG image in Cloud Storage. For + example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only + supported image type. + resolution (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): + Normalized image resolution, based on output video + resolution. Valid values: ``0.0``–``1.0``. To respect the + original image aspect ratio, set either ``x`` or ``y`` to + ``0.0``. To use the original image resolution, set both + ``x`` and ``y`` to ``0.0``. + alpha (float): + Target image opacity. Valid values are from ``1.0`` (solid, + default) to ``0.0`` (transparent), exclusive. Set this to a + value greater than ``0.0``. + """ + + uri = proto.Field( + proto.STRING, + number=1, + ) + resolution = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + alpha = proto.Field( + proto.DOUBLE, + number=3, + ) + + class AnimationStatic(proto.Message): + r"""Display static overlay object. + Attributes: + xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start displaying the overlay + object, in seconds. 
Default: 0 + """ + + xy = proto.Field( + proto.MESSAGE, + number=1, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class AnimationFade(proto.Message): + r"""Display overlay object with fade animation. + Attributes: + fade_type (google.cloud.video.transcoder_v1beta1.types.Overlay.FadeType): + Required. Type of fade animation: ``FADE_IN`` or + ``FADE_OUT``. + xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start the fade animation, in + seconds. Default: 0 + end_time_offset (google.protobuf.duration_pb2.Duration): + The time to end the fade animation, in seconds. Default: + ``start_time_offset`` + 1s + """ + + fade_type = proto.Field( + proto.ENUM, + number=1, + enum='Overlay.FadeType', + ) + xy = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + end_time_offset = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + class AnimationEnd(proto.Message): + r"""End previous overlay animation from the video. Without + AnimationEnd, the overlay object will keep the state of previous + animation until the end of the video. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to end overlay object, in seconds. 
+ Default: 0 + """ + + start_time_offset = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class Animation(proto.Message): + r"""Animation types. + Attributes: + animation_static (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationStatic): + Display static overlay object. + animation_fade (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationFade): + Display overlay object with fade animation. + animation_end (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationEnd): + End previous animation. + """ + + animation_static = proto.Field( + proto.MESSAGE, + number=1, + oneof='animation_type', + message='Overlay.AnimationStatic', + ) + animation_fade = proto.Field( + proto.MESSAGE, + number=2, + oneof='animation_type', + message='Overlay.AnimationFade', + ) + animation_end = proto.Field( + proto.MESSAGE, + number=3, + oneof='animation_type', + message='Overlay.AnimationEnd', + ) + + image = proto.Field( + proto.MESSAGE, + number=1, + message=Image, + ) + animations = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Animation, + ) + + +class PreprocessingConfig(proto.Message): + r"""Preprocessing configurations. + Attributes: + color (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Color): + Color preprocessing configuration. + denoise (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Denoise): + Denoise preprocessing configuration. + deblock (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Deblock): + Deblock preprocessing configuration. + audio (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Audio): + Audio preprocessing configuration. + crop (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Crop): + Specify the video cropping configuration. + pad (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Pad): + Specify the video pad filter configuration. 
+ """ + + class Color(proto.Message): + r"""Color preprocessing configuration. + Attributes: + saturation (float): + Control color saturation of the video. Enter + a value between -1 and 1, where -1 is fully + desaturated and 1 is maximum saturation. 0 is no + change. The default is 0. + contrast (float): + Control black and white contrast of the + video. Enter a value between -1 and 1, where -1 + is minimum contrast and 1 is maximum contrast. 0 + is no change. The default is 0. + brightness (float): + Control brightness of the video. Enter a + value between -1 and 1, where -1 is minimum + brightness and 1 is maximum brightness. 0 is no + change. The default is 0. + """ + + saturation = proto.Field( + proto.DOUBLE, + number=1, + ) + contrast = proto.Field( + proto.DOUBLE, + number=2, + ) + brightness = proto.Field( + proto.DOUBLE, + number=3, + ) + + class Denoise(proto.Message): + r"""Denoise preprocessing configuration. + Attributes: + strength (float): + Set strength of the denoise. Enter a value + between 0 and 1. The higher the value, the + smoother the image. 0 is no denoising. The + default is 0. + tune (str): + Set the denoiser mode. The default is ``"standard"``. + + Supported denoiser modes: + + - 'standard' + - 'grain' + """ + + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + tune = proto.Field( + proto.STRING, + number=2, + ) + + class Deblock(proto.Message): + r"""Deblock preprocessing configuration. + Attributes: + strength (float): + Set strength of the deblocker. Enter a value + between 0 and 1. The higher the value, the + stronger the block removal. 0 is no deblocking. + The default is 0. + enabled (bool): + Enable deblocker. The default is ``false``. + """ + + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + enabled = proto.Field( + proto.BOOL, + number=2, + ) + + class Audio(proto.Message): + r"""Audio preprocessing configuration. 
+ Attributes: + lufs (float): + Specify audio loudness normalization in loudness units + relative to full scale (LUFS). Enter a value between -24 and + 0 (the default), where: + + - -24 is the Advanced Television Systems Committee (ATSC + A/85) standard + - -23 is the EU R128 broadcast standard + - -19 is the prior standard for online mono audio + - -18 is the ReplayGain standard + - -16 is the prior standard for stereo audio + - -14 is the new online audio standard recommended by + Spotify, as well as Amazon Echo + - 0 disables normalization + high_boost (bool): + Enable boosting high frequency components. The default is + ``false``. + low_boost (bool): + Enable boosting low frequency components. The default is + ``false``. + """ + + lufs = proto.Field( + proto.DOUBLE, + number=1, + ) + high_boost = proto.Field( + proto.BOOL, + number=2, + ) + low_boost = proto.Field( + proto.BOOL, + number=3, + ) + + class Crop(proto.Message): + r"""Video cropping configuration for the input video. The cropped + input video is scaled to match the output resolution. + + Attributes: + top_pixels (int): + The number of pixels to crop from the top. + The default is 0. + bottom_pixels (int): + The number of pixels to crop from the bottom. + The default is 0. + left_pixels (int): + The number of pixels to crop from the left. + The default is 0. + right_pixels (int): + The number of pixels to crop from the right. + The default is 0. + """ + + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) + + class Pad(proto.Message): + r"""Pad filter configuration for the input video. The padded + input video is scaled after padding with black to match the + output resolution. + + Attributes: + top_pixels (int): + The number of pixels to add to the top. The + default is 0. 
+ bottom_pixels (int): + The number of pixels to add to the bottom. + The default is 0. + left_pixels (int): + The number of pixels to add to the left. The + default is 0. + right_pixels (int): + The number of pixels to add to the right. The + default is 0. + """ + + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) + + color = proto.Field( + proto.MESSAGE, + number=1, + message=Color, + ) + denoise = proto.Field( + proto.MESSAGE, + number=2, + message=Denoise, + ) + deblock = proto.Field( + proto.MESSAGE, + number=3, + message=Deblock, + ) + audio = proto.Field( + proto.MESSAGE, + number=4, + message=Audio, + ) + crop = proto.Field( + proto.MESSAGE, + number=5, + message=Crop, + ) + pad = proto.Field( + proto.MESSAGE, + number=6, + message=Pad, + ) + + +class VideoStream(proto.Message): + r"""Video stream resource. + Attributes: + codec (str): + Codec type. The following codecs are supported: + + - ``h264`` (default) + - ``h265`` + - ``vp9`` + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - ``baseline`` + - ``main`` + - ``high`` (default) + + The available options are FFmpeg-compatible. Note that + certain values for this field may cause the transcoder to + override other fields you set in the ``VideoStream`` + message. + tune (str): + Enforces the specified codec tune. The available options are + FFmpeg-compatible. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``VideoStream`` message. + preset (str): + Enforces the specified codec preset. The default is + ``veryfast``. The available options are FFmpeg-compatible. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``VideoStream`` message. 
+ height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + pixel_format (str): + Pixel format to use. The default is ``"yuv420p"``. + + Supported pixel formats: + + - 'yuv420p' pixel format. + - 'yuv422p' pixel format. + - 'yuv444p' pixel format. + - 'yuv420p10' 10-bit HDR pixel format. + - 'yuv422p10' 10-bit HDR pixel format. + - 'yuv444p10' 10-bit HDR pixel format. + - 'yuv420p12' 12-bit HDR pixel format. + - 'yuv422p12' 12-bit HDR pixel format. + - 'yuv444p12' 12-bit HDR pixel format. + bitrate_bps (int): + Required. The video bitrate in bits per + second. The minimum value is 1,000. The maximum + value for H264/H265 is 800,000,000. The maximum + value for VP9 is 480,000,000. + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``"vbr"``. + + Supported rate control modes: + + - 'vbr' - variable bitrate + - 'crf' - constant rate factor + enable_two_pass (bool): + Use two-pass encoding strategy to achieve better video + quality. ``VideoStream.rate_control_mode`` must be + ``"vbr"``. The default is ``false``. + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + vbv_size_bits (int): + Size of the Video Buffering Verifier (VBV) buffer in bits. + Must be greater than zero. The default is equal to + ``VideoStream.bitrate_bps``. + vbv_fullness_bits (int): + Initial fullness of the Video Buffering Verifier (VBV) + buffer in bits. Must be greater than zero. The default is + equal to 90% of ``VideoStream.vbv_size_bits``. 
+ allow_open_gop (bool): + Specifies whether an open Group of Pictures (GOP) structure + should be allowed or not. The default is ``false``. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``"3s"``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + entropy_coder (str): + The entropy coder to use. The default is ``"cabac"``. + + Supported entropy coders: + + - 'cavlc' + - 'cabac' + b_pyramid (bool): + Allow B-pyramid for reference frame selection. This may not + be supported on all decoders. The default is ``false``. + b_frame_count (int): + The number of consecutive B-frames. Must be greater than or + equal to zero. Must be less than + ``VideoStream.gop_frame_count`` if set. The default is 0. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculate frame + rate `__ + for more information. + aq_strength (float): + Specify the intensity of the adaptive + quantizer (AQ). Must be between 0 and 1, where 0 + disables the quantizer and 1 maximizes the + quantizer. A higher value equals a lower bitrate + but smoother image. The default is 0. 
+ """ + + codec = proto.Field( + proto.STRING, + number=1, + ) + profile = proto.Field( + proto.STRING, + number=2, + ) + tune = proto.Field( + proto.STRING, + number=3, + ) + preset = proto.Field( + proto.STRING, + number=4, + ) + height_pixels = proto.Field( + proto.INT32, + number=5, + ) + width_pixels = proto.Field( + proto.INT32, + number=6, + ) + pixel_format = proto.Field( + proto.STRING, + number=7, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=8, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=9, + ) + enable_two_pass = proto.Field( + proto.BOOL, + number=10, + ) + crf_level = proto.Field( + proto.INT32, + number=11, + ) + vbv_size_bits = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits = proto.Field( + proto.INT32, + number=13, + ) + allow_open_gop = proto.Field( + proto.BOOL, + number=14, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=15, + oneof='gop_mode', + ) + gop_duration = proto.Field( + proto.MESSAGE, + number=16, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + entropy_coder = proto.Field( + proto.STRING, + number=17, + ) + b_pyramid = proto.Field( + proto.BOOL, + number=18, + ) + b_frame_count = proto.Field( + proto.INT32, + number=19, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=20, + ) + aq_strength = proto.Field( + proto.DOUBLE, + number=21, + ) + + +class AudioStream(proto.Message): + r"""Audio stream resource. + Attributes: + codec (str): + The codec for this audio stream. The default is ``"aac"``. + + Supported audio codecs: + + - 'aac' + - 'aac-he' + - 'aac-he-v2' + - 'mp3' + - 'ac3' + - 'eac3' + bitrate_bps (int): + Required. Audio bitrate in bits per second. + Must be between 1 and 10,000,000. + channel_count (int): + Number of audio channels. Must be between 1 + and 6. The default is 2. + channel_layout (Sequence[str]): + A list of channel names specifying layout of the audio + channels. 
This only affects the metadata embedded in the + container headers, if supported by the specified format. The + default is ``["fl", "fr"]``. + + Supported channel names: + + - 'fl' - Front left channel + - 'fr' - Front right channel + - 'sl' - Side left channel + - 'sr' - Side right channel + - 'fc' - Front center channel + - 'lfe' - Low frequency + mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom]): + The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + sample_rate_hertz (int): + The audio sample rate in Hertz. The default + is 48000 Hertz. + """ + + class AudioAtom(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + + Attributes: + key (str): + Required. The ``EditAtom.key`` that references the atom with + audio inputs in the ``Job.edit_list``. + channels (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel]): + List of ``Channel``\ s for this audio stream. for in-depth + explanation. + """ + + class AudioChannel(proto.Message): + r"""The audio channel. + Attributes: + inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel.AudioChannelInput]): + List of ``Job.inputs`` for this audio channel. + """ + + class AudioChannelInput(proto.Message): + r"""Identifies which input file, track, and channel should be + used. + + Attributes: + key (str): + Required. The ``Input.key`` that identifies the input file. + track (int): + Required. The zero-based index of the track + in the input file. + channel (int): + Required. The zero-based index of the channel + in the input file. + gain_db (float): + Audio volume control in dB. Negative values + decrease volume, positive values increase. The + default is 0. 
+ """ + + key = proto.Field( + proto.STRING, + number=1, + ) + track = proto.Field( + proto.INT32, + number=2, + ) + channel = proto.Field( + proto.INT32, + number=3, + ) + gain_db = proto.Field( + proto.DOUBLE, + number=4, + ) + + inputs = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AudioStream.AudioAtom.AudioChannel.AudioChannelInput', + ) + + key = proto.Field( + proto.STRING, + number=1, + ) + channels = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='AudioStream.AudioAtom.AudioChannel', + ) + + codec = proto.Field( + proto.STRING, + number=1, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=2, + ) + channel_count = proto.Field( + proto.INT32, + number=3, + ) + channel_layout = proto.RepeatedField( + proto.STRING, + number=4, + ) + mapping = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=AudioAtom, + ) + sample_rate_hertz = proto.Field( + proto.INT32, + number=6, + ) + + +class TextStream(proto.Message): + r"""Encoding of a text stream. For example, closed captions or + subtitles. + + Attributes: + codec (str): + The codec for this text stream. The default is ``"webvtt"``. + + Supported text codecs: + + - 'srt' + - 'ttml' + - 'cea608' + - 'cea708' + - 'webvtt' + language_code (str): + Required. The BCP-47 language code, such as ``"en-US"`` or + ``"sr-Latn"``. For more information, see + https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. + mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom]): + The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + """ + + class TextAtom(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + + Attributes: + key (str): + Required. The ``EditAtom.key`` that references atom with + text inputs in the ``Job.edit_list``. 
+ inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom.TextInput]): + List of ``Job.inputs`` that should be embedded in this atom. + Only one input is supported. + """ + + class TextInput(proto.Message): + r"""Identifies which input file and track should be used. + Attributes: + key (str): + Required. The ``Input.key`` that identifies the input file. + track (int): + Required. The zero-based index of the track + in the input file. + """ + + key = proto.Field( + proto.STRING, + number=1, + ) + track = proto.Field( + proto.INT32, + number=2, + ) + + key = proto.Field( + proto.STRING, + number=1, + ) + inputs = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='TextStream.TextAtom.TextInput', + ) + + codec = proto.Field( + proto.STRING, + number=1, + ) + language_code = proto.Field( + proto.STRING, + number=2, + ) + mapping = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TextAtom, + ) + + +class SegmentSettings(proto.Message): + r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. + Attributes: + segment_duration (google.protobuf.duration_pb2.Duration): + Duration of the segments in seconds. The default is + ``"6.0s"``. Note that ``segmentDuration`` must be greater + than or equal to ```gopDuration`` <#videostream>`__, and + ``segmentDuration`` must be divisible by + ```gopDuration`` <#videostream>`__. + individual_segments (bool): + Required. Create an individual segment file. The default is + ``false``. + """ + + segment_duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + individual_segments = proto.Field( + proto.BOOL, + number=3, + ) + + +class Encryption(proto.Message): + r"""Encryption settings. + Attributes: + key (str): + Required. 128 bit encryption key represented + as lowercase hexadecimal digits. + iv (str): + Required. 128 bit Initialization Vector (IV) + represented as lowercase hexadecimal digits. 
+ aes_128 (google.cloud.video.transcoder_v1beta1.types.Encryption.Aes128Encryption): + Configuration for AES-128 encryption. + sample_aes (google.cloud.video.transcoder_v1beta1.types.Encryption.SampleAesEncryption): + Configuration for SAMPLE-AES encryption. + mpeg_cenc (google.cloud.video.transcoder_v1beta1.types.Encryption.MpegCommonEncryption): + Configuration for MPEG Common Encryption + (MPEG-CENC). + """ + + class Aes128Encryption(proto.Message): + r"""Configuration for AES-128 encryption. + Attributes: + key_uri (str): + Required. URI of the key delivery service. + This URI is inserted into the M3U8 header. + """ + + key_uri = proto.Field( + proto.STRING, + number=1, + ) + + class SampleAesEncryption(proto.Message): + r"""Configuration for SAMPLE-AES encryption. + Attributes: + key_uri (str): + Required. URI of the key delivery service. + This URI is inserted into the M3U8 header. + """ + + key_uri = proto.Field( + proto.STRING, + number=1, + ) + + class MpegCommonEncryption(proto.Message): + r"""Configuration for MPEG Common Encryption (MPEG-CENC). + Attributes: + key_id (str): + Required. 128 bit Key ID represented as + lowercase hexadecimal digits for use with common + encryption. + scheme (str): + Required. Specify the encryption scheme. 
+ Supported encryption schemes: + - 'cenc' + - 'cbcs' + """ + + key_id = proto.Field( + proto.STRING, + number=1, + ) + scheme = proto.Field( + proto.STRING, + number=2, + ) + + key = proto.Field( + proto.STRING, + number=1, + ) + iv = proto.Field( + proto.STRING, + number=2, + ) + aes_128 = proto.Field( + proto.MESSAGE, + number=3, + oneof='encryption_mode', + message=Aes128Encryption, + ) + sample_aes = proto.Field( + proto.MESSAGE, + number=4, + oneof='encryption_mode', + message=SampleAesEncryption, + ) + mpeg_cenc = proto.Field( + proto.MESSAGE, + number=5, + oneof='encryption_mode', + message=MpegCommonEncryption, + ) + + +class Progress(proto.Message): + r"""Estimated fractional progress for each step, from ``0`` to ``1``. + Attributes: + analyzed (float): + Estimated fractional progress for ``analyzing`` step. + encoded (float): + Estimated fractional progress for ``encoding`` step. + uploaded (float): + Estimated fractional progress for ``uploading`` step. + notified (float): + Estimated fractional progress for ``notifying`` step. + """ + + analyzed = proto.Field( + proto.DOUBLE, + number=1, + ) + encoded = proto.Field( + proto.DOUBLE, + number=2, + ) + uploaded = proto.Field( + proto.DOUBLE, + number=3, + ) + notified = proto.Field( + proto.DOUBLE, + number=4, + ) + + +class FailureDetail(proto.Message): + r"""Additional information about the reasons for the failure. + Attributes: + description (str): + A description of the failure. 
+ """ + + description = proto.Field( + proto.STRING, + number=1, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py new file mode 100644 index 0000000..937818b --- /dev/null +++ b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py @@ -0,0 +1,256 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import proto # type: ignore + +from google.cloud.video.transcoder_v1beta1.types import resources + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1beta1', + manifest={ + 'CreateJobRequest', + 'ListJobsRequest', + 'GetJobRequest', + 'DeleteJobRequest', + 'ListJobsResponse', + 'CreateJobTemplateRequest', + 'ListJobTemplatesRequest', + 'GetJobTemplateRequest', + 'DeleteJobTemplateRequest', + 'ListJobTemplatesResponse', + }, +) + + +class CreateJobRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJob``. + Attributes: + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + job (google.cloud.video.transcoder_v1beta1.types.Job): + Required. Parameters for creating transcoding + job. 
+ """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Job, + ) + + +class ListJobsRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobs``. The parent + location from which to retrieve the collection of jobs. + + Attributes: + parent (str): + Required. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class GetJobRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJob``. + Attributes: + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJob``. + Attributes: + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListJobsResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobs``. + Attributes: + jobs (Sequence[google.cloud.video.transcoder_v1beta1.types.Job]): + List of jobs in the specified region. + next_page_token (str): + The pagination token. + """ + + @property + def raw_page(self): + return self + + jobs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Job, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +class CreateJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJobTemplate``. + Attributes: + parent (str): + Required. 
The parent location to create this job template. + Format: ``projects/{project}/locations/{location}`` + job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): + Required. Parameters for creating job + template. + job_template_id (str): + Required. The ID to use for the job template, which will + become the final component of the job template's resource + name. + + This value should be 4-63 characters, and valid characters + must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + job_template = proto.Field( + proto.MESSAGE, + number=2, + message=resources.JobTemplate, + ) + job_template_id = proto.Field( + proto.STRING, + number=3, + ) + + +class ListJobTemplatesRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobTemplates``. + Attributes: + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. + """ + + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + + +class GetJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJobTemplate``. + Attributes: + name (str): + Required. The name of the job template to retrieve. Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJobTemplate``. + Attributes: + name (str): + Required. The name of the job template to delete. 
+ ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + """ + + name = proto.Field( + proto.STRING, + number=1, + ) + + +class ListJobTemplatesResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobTemplates``. + Attributes: + job_templates (Sequence[google.cloud.video.transcoder_v1beta1.types.JobTemplate]): + List of job templates in the specified + region. + next_page_token (str): + The pagination token. + """ + + @property + def raw_page(self): + return self + + job_templates = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.JobTemplate, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta1/mypy.ini b/owl-bot-staging/v1beta1/mypy.ini new file mode 100644 index 0000000..4505b48 --- /dev/null +++ b/owl-bot-staging/v1beta1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.6 +namespace_packages = True diff --git a/owl-bot-staging/v1beta1/noxfile.py b/owl-bot-staging/v1beta1/noxfile.py new file mode 100644 index 0000000..4ddc2d2 --- /dev/null +++ b/owl-bot-staging/v1beta1/noxfile.py @@ -0,0 +1,132 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+#
+import os
+import pathlib
+import shutil
+import subprocess
+import sys
+
+
+import nox  # type: ignore
+
+CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
+
+LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
+PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
+
+
+nox.sessions = [
+    "unit",
+    "cover",
+    "mypy",
+    "check_lower_bounds",
+    # exclude update_lower_bounds from default
+    "docs",
+]
+
+@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
+def unit(session):
+    """Run the unit test suite."""
+
+    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
+    session.install('-e', '.')
+
+    session.run(
+        'py.test',
+        '--quiet',
+        '--cov=google/cloud/video/transcoder_v1beta1/',
+        '--cov-config=.coveragerc',
+        '--cov-report=term',
+        '--cov-report=html',
+        os.path.join('tests', 'unit', ''.join(session.posargs))
+    )
+
+
+@nox.session(python='3.7')
+def cover(session):
+    """Run the final coverage report.
+    This outputs the coverage report aggregating coverage from the unit
+    test runs (not system test runs), and then erases coverage data.
+ """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=['3.6', '3.7']) +def mypy(session): + """Run the type checker.""" + session.install('mypy', 'types-pkg_resources') + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python='3.6') +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx<3.0.0", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) diff --git a/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py b/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py new file mode 100644 index 0000000..700007b --- /dev/null +++ b/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py @@ -0,0 +1,183 @@ +#! 
/usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class transcoderCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_job': ('parent', 'job', ), + 'create_job_template': ('parent', 'job_template', 'job_template_id', ), + 'delete_job': ('name', ), + 'delete_job_template': ('name', ), + 'get_job': ('name', ), + 'get_job_template': ('name', ), + 'list_jobs': ('parent', 'page_size', 'page_token', ), + 'list_job_templates': ('parent', 'page_size', 'page_token', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. 
+ # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: not a.keyword.value in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=transcoderCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. + tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. 
+ with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the transcoder client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta1/setup.py b/owl-bot-staging/v1beta1/setup.py new file mode 100644 index 0000000..8f17241 --- /dev/null +++ b/owl-bot-staging/v1beta1/setup.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache 
License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import io +import os +import setuptools # type: ignore + +version = '0.1.0' + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, 'README.rst') +with io.open(readme_filename, encoding='utf-8') as readme_file: + readme = readme_file.read() + +setuptools.setup( + name='google-cloud-video-transcoder', + version=version, + long_description=readme, + packages=setuptools.PEP420PackageFinder.find(), + namespace_packages=('google', 'google.cloud', 'google.cloud.video'), + platforms='Posix; MacOS X; Windows', + include_package_data=True, + install_requires=( + 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', + 'libcst >= 0.2.5', + 'proto-plus >= 1.15.0', + 'packaging >= 14.3', ), + python_requires='>=3.6', + classifiers=[ + 'Development Status :: 3 - Alpha', + 'Intended Audience :: Developers', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Topic :: Internet', + 'Topic :: Software Development :: Libraries :: Python Modules', + ], + zip_safe=False, +) diff --git a/owl-bot-staging/v1beta1/tests/__init__.py b/owl-bot-staging/v1beta1/tests/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1beta1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed 
under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta1/tests/unit/__init__.py b/owl-bot-staging/v1beta1/tests/unit/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1beta1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py new file mode 100644 index 0000000..b54a5fc --- /dev/null +++ b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py new file mode 100644 index 0000000..9851ffb --- /dev/null +++ b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -0,0 +1,3243 @@ +# -*- coding: utf-8 -*- +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import os +import mock +import packaging.version + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule + + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import TranscoderServiceAsyncClient +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import TranscoderServiceClient +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports +from google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.base import _GOOGLE_AUTH_VERSION +from google.cloud.video.transcoder_v1beta1.types import resources +from google.cloud.video.transcoder_v1beta1.types import services +from google.oauth2 import service_account +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +import google.auth + + +# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively +# through google-api-core: +# - Delete the auth "less than" test cases +# - Delete these pytest markers (Make the "greater than or equal to" tests 
the default). +requires_google_auth_lt_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), + reason="This test requires google-auth < 1.25.0", +) +requires_google_auth_gte_1_25_0 = pytest.mark.skipif( + packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), + reason="This test requires google-auth >= 1.25.0", +) + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. +def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None + assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class", [ + TranscoderServiceClient, + TranscoderServiceAsyncClient, +]) +def test_transcoder_service_client_from_service_account_info(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'transcoder.googleapis.com:443' + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TranscoderServiceGrpcTransport, "grpc"), + (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class", [ + TranscoderServiceClient, + TranscoderServiceAsyncClient, +]) +def test_transcoder_service_client_from_service_account_file(client_class): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json") + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == 'transcoder.googleapis.com:443' + + +def test_transcoder_service_client_get_transport_class(): + 
transport = TranscoderServiceClient.get_transport_class() + available_transports = [ + transports.TranscoderServiceGrpcTransport, + ] + assert transport in available_transports + + transport = TranscoderServiceClient.get_transport_class("grpc") + assert transport == transports.TranscoderServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. 
+ options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class() + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class() + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + # Check the case client_cert_source and ADC client cert 
are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class() + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. + options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name): + # Check the case credentials file is provided. 
+ options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_transcoder_service_client_client_options_from_dict(): + with mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = TranscoderServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + ) + + +def test_create_job(transport: str = 'grpc', request_type=services.CreateJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + priority=898, + state=resources.Job.ProcessingState.PENDING, + failure_reason='failure_reason_value', + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.priority == 898 + assert response.state == resources.Job.ProcessingState.PENDING + assert response.failure_reason == 'failure_reason_value' + assert response.ttl_after_completion_days == 2670 + + +def test_create_job_from_dict(): + test_create_job(request_type=dict) + + +def test_create_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + client.create_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + +@pytest.mark.asyncio +async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + priority=898, + state=resources.Job.ProcessingState.PENDING, + failure_reason='failure_reason_value', + ttl_after_completion_days=2670, + )) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.priority == 898 + assert response.state == resources.Job.ProcessingState.PENDING + assert response.failure_reason == 'failure_reason_value' + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + + +def test_create_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = resources.Job() + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job == resources.Job(name='name_value') + + +def test_create_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + +@pytest.mark.asyncio +async def test_create_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job == resources.Job(name='name_value') + + +@pytest.mark.asyncio +async def test_create_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + +def test_list_jobs(transport: str = 'grpc', request_type=services.ListJobsRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse( + next_page_token='next_page_token_value', + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_jobs_from_dict(): + test_list_jobs(request_type=dict) + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + +@pytest.mark.asyncio +async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = services.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_jobs_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_jobs_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_jobs_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + + +def test_list_jobs_pager(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_jobs(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, resources.Job) + for i in results) + +def test_list_jobs_pages(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Job) + for i in responses) + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_jobs(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_job(transport: str = 'grpc', request_type=services.GetJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + priority=898, + state=resources.Job.ProcessingState.PENDING, + failure_reason='failure_reason_value', + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.priority == 898 + assert response.state == resources.Job.ProcessingState.PENDING + assert response.failure_reason == 'failure_reason_value' + assert response.ttl_after_completion_days == 2670 + + +def test_get_job_from_dict(): + test_get_job(request_type=dict) + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + priority=898, + state=resources.Job.ProcessingState.PENDING, + failure_reason='failure_reason_value', + ttl_after_completion_days=2670, + )) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.priority == 898 + assert response.state == resources.Job.ProcessingState.PENDING + assert response.failure_reason == 'failure_reason_value' + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = resources.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_get_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_get_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + services.GetJobRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_get_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.get_job( + services.GetJobRequest(), + name='name_value', + ) + + +def test_delete_job(transport: str = 'grpc', request_type=services.DeleteJobRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_from_dict(): + test_delete_job(request_type=dict) + + +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + +@pytest.mark.asyncio +async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + + +def test_delete_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +def test_delete_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + + +@pytest.mark.asyncio +async def test_delete_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_delete_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + await client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + + +def test_create_job_template(transport: str = 'grpc', request_type=services.CreateJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate( + name='name_value', + ) + response = client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +def test_create_job_template_from_dict(): + test_create_job_template(request_type=dict) + + +def test_create_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + client.create_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( + name='name_value', + )) + response = await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_job_template_async_from_dict(): + await test_create_job_template_async(request_type=dict) + + +def test_create_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobTemplateRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_create_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = resources.JobTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_template == resources.JobTemplate(name='name_value') + assert args[0].job_template_id == 'job_template_id_value' + + +def test_create_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + +@pytest.mark.asyncio +async def test_create_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ response = await client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + assert args[0].job_template == resources.JobTemplate(name='name_value') + assert args[0].job_template_id == 'job_template_id_value' + + +@pytest.mark.asyncio +async def test_create_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + +def test_list_job_templates(transport: str = 'grpc', request_type=services.ListJobTemplatesRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + ) + response = client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + + +def test_list_job_templates_from_dict(): + test_list_job_templates(request_type=dict) + + +def test_list_job_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + client.list_job_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + +@pytest.mark.asyncio +async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + )) + response = await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. 
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + + +@pytest.mark.asyncio +async def test_list_job_templates_async_from_dict(): + await test_list_job_templates_async(request_type=dict) + + +def test_list_job_templates_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = services.ListJobTemplatesResponse() + client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_templates_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent/value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent/value', + ) in kw['metadata'] + + +def test_list_job_templates_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +def test_list_job_templates_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].parent == 'parent_value' + + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_job_templates_pager(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_templates(request={}) + + assert pager._metadata == metadata + + results = [i for i in pager] + assert len(results) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in results) + +def test_list_job_templates_pages(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_templates_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in responses) + +@pytest.mark.asyncio +async def test_list_job_templates_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_job_templates(request={})).pages: + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +def test_get_job_template(transport: str = 'grpc', request_type=services.GetJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate( + name='name_value', + ) + response = client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +def test_get_job_template_from_dict(): + test_get_job_template(request_type=dict) + + +def test_get_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + client.get_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( + name='name_value', + )) + response = await client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_template_async_from_dict(): + await test_get_job_template_async(request_type=dict) + + +def test_get_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+    # Establish that the field header was sent.
+    _, _, kw = call.mock_calls[0]
+    assert (
+        'x-goog-request-params',
+        'name=name/value',
+    ) in kw['metadata']
+
+
+def test_get_job_template_flattened():
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.JobTemplate()
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.get_job_template(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_get_job_template_flattened_error():
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.get_job_template(
+            services.GetJobTemplateRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_get_job_template_flattened_async():
+    client = TranscoderServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (The redundant sync-style `call.return_value = resources.JobTemplate()`
+        # dead store that preceded this assignment has been dropped.)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate())
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+ response = await client.get_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0].name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job_template( + services.GetJobTemplateRequest(), + name='name_value', + ) + + +def test_delete_job_template(transport: str = 'grpc', request_type=services.DeleteJobTemplateRequest): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_template_from_dict(): + test_delete_job_template(request_type=dict) + + +def test_delete_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + client.delete_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + +@pytest.mark.asyncio +async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_template_async_from_dict(): + await test_delete_job_template_async(request_type=dict) + + +def test_delete_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. 
+ request = services.DeleteJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = None + client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobTemplateRequest() + + request.name = 'name/value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name/value', + ) in kw['metadata'] + + +def test_delete_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. 
+        call.return_value = None
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        client.delete_job_template(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+def test_delete_job_template_flattened_error():
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+    with pytest.raises(ValueError):
+        client.delete_job_template(
+            services.DeleteJobTemplateRequest(),
+            name='name_value',
+        )
+
+
+@pytest.mark.asyncio
+async def test_delete_job_template_flattened_async():
+    client = TranscoderServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.delete_job_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        # (The redundant sync-style `call.return_value = None` dead store
+        # that preceded this assignment has been dropped.)
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
+        # Call the method with a truthy value for each flattened field,
+        # using the keyword arguments to the method.
+        response = await client.delete_job_template(
+            name='name_value',
+        )
+
+        # Establish that the underlying call was made with the expected
+        # request object values.
+        assert len(call.mock_calls)
+        _, args, _ = call.mock_calls[0]
+        assert args[0].name == 'name_value'
+
+
+@pytest.mark.asyncio
+async def test_delete_job_template_flattened_error_async():
+    client = TranscoderServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+    )
+
+    # Attempting to call a method with both a request object and flattened
+    # fields is an error.
+ with pytest.raises(ValueError): + await client.delete_job_template( + services.DeleteJobTemplateRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide scopes and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TranscoderServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. 
+ transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TranscoderServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TranscoderServiceGrpcTransport, + ) + +def test_transcoder_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_transcoder_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_job', + 'list_jobs', + 'get_job', + 'delete_job', + 'create_job_template', + 'list_job_templates', + 'get_job_template', + 'delete_job_template', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_base_transport_with_credentials_file_old_google_auth(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", scopes=( + 'https://www.googleapis.com/auth/cloud-platform', + ), + quota_project_id="octopus", + ) + + +def 
test_transcoder_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport() + adc.assert_called_once() + + +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranscoderServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_auth_adc_old_google_auth(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranscoderServiceClient() + adc.assert_called_once_with( + scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_gte_1_25_0 +def test_transcoder_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +@requires_google_auth_lt_1_25_0 +def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus") + adc.assert_called_once_with(scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TranscoderServiceGrpcTransport, grpc_helpers), + (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
+    # is used.
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+def test_transcoder_service_host_no_port():
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'),
+    )
+    assert client.transport._host == 'transcoder.googleapis.com:443'
+
+
+def test_transcoder_service_host_with_port():
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'),
+    )
+    assert client.transport._host == 'transcoder.googleapis.com:8000'
+
+def test_transcoder_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TranscoderServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    # Identity comparison against None (PEP 8); `== None` is E711.
+    assert transport._ssl_channel_credentials is None
+
+
+def test_transcoder_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+ transport = transports.TranscoderServiceGrpcAsyncIOTransport( + host="squid.clam.whelk", + channel=channel, + ) + assert transport.grpc_channel == channel + assert transport._host == "squid.clam.whelk:443" + assert transport._ssl_channel_credentials == None + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed 
from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_path(): + project = "squid" + location = "clam" + job = "whelk" + expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + actual = TranscoderServiceClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "job": "nudibranch", + } + path = TranscoderServiceClient.job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_job_path(path) + assert expected == actual + +def test_job_template_path(): + project = "cuttlefish" + location = "mussel" + job_template = "winkle" + expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) + actual = TranscoderServiceClient.job_template_path(project, location, job_template) + assert expected == actual + + +def test_parse_job_template_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "job_template": "abalone", + } + path = TranscoderServiceClient.job_template_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_job_template_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TranscoderServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TranscoderServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TranscoderServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TranscoderServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TranscoderServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TranscoderServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TranscoderServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TranscoderServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TranscoderServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TranscoderServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_withDEFAULT_CLIENT_INFO(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = TranscoderServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) From 38ee3a0bd736f99252b979353363b3b5fa7c97f5 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Fri, 6 Aug 2021 21:48:10 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/master/packages/owl-bot/README.md --- .../transcoder_v1beta1/types/resources.py | 238 +- owl-bot-staging/v1/.coveragerc | 17 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - owl-bot-staging/v1/docs/conf.py | 376 -- owl-bot-staging/v1/docs/index.rst | 7 - .../v1/docs/transcoder_v1/services.rst | 6 - .../docs/transcoder_v1/transcoder_service.rst | 10 - .../v1/docs/transcoder_v1/types.rst | 7 - .../google/cloud/video/transcoder/__init__.py | 81 - .../v1/google/cloud/video/transcoder/py.typed | 2 - .../cloud/video/transcoder_v1/__init__.py | 82 - .../video/transcoder_v1/gapic_metadata.json | 103 - .../google/cloud/video/transcoder_v1/py.typed | 2 - .../video/transcoder_v1/services/__init__.py | 15 - .../services/transcoder_service/__init__.py | 22 - .../transcoder_service/async_client.py | 810 ---- 
.../services/transcoder_service/client.py | 1010 ----- .../services/transcoder_service/pagers.py | 263 -- .../transcoder_service/transports/__init__.py | 33 - .../transcoder_service/transports/base.py | 268 -- .../transcoder_service/transports/grpc.py | 442 --- .../transports/grpc_asyncio.py | 446 --- .../video/transcoder_v1/types/__init__.py | 80 - .../video/transcoder_v1/types/resources.py | 1942 ---------- .../video/transcoder_v1/types/services.py | 298 -- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 132 - .../scripts/fixup_transcoder_v1_keywords.py | 183 - owl-bot-staging/v1/setup.py | 54 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/transcoder_v1/__init__.py | 16 - .../transcoder_v1/test_transcoder_service.py | 3237 ---------------- owl-bot-staging/v1beta1/.coveragerc | 17 - owl-bot-staging/v1beta1/MANIFEST.in | 2 - owl-bot-staging/v1beta1/README.rst | 49 - owl-bot-staging/v1beta1/docs/conf.py | 376 -- owl-bot-staging/v1beta1/docs/index.rst | 7 - .../docs/transcoder_v1beta1/services.rst | 6 - .../transcoder_v1beta1/transcoder_service.rst | 10 - .../v1beta1/docs/transcoder_v1beta1/types.rst | 7 - .../google/cloud/video/transcoder/__init__.py | 85 - .../google/cloud/video/transcoder/py.typed | 2 - .../video/transcoder_v1beta1/__init__.py | 86 - .../transcoder_v1beta1/gapic_metadata.json | 103 - .../cloud/video/transcoder_v1beta1/py.typed | 2 - .../transcoder_v1beta1/services/__init__.py | 15 - .../services/transcoder_service/__init__.py | 22 - .../transcoder_service/async_client.py | 809 ---- .../services/transcoder_service/client.py | 1009 ----- .../services/transcoder_service/pagers.py | 263 -- .../transcoder_service/transports/__init__.py | 33 - .../transcoder_service/transports/base.py | 268 -- .../transcoder_service/transports/grpc.py | 442 --- .../transports/grpc_asyncio.py | 446 --- .../transcoder_v1beta1/types/__init__.py | 84 - 
.../transcoder_v1beta1/types/resources.py | 1736 --------- .../transcoder_v1beta1/types/services.py | 256 -- owl-bot-staging/v1beta1/mypy.ini | 3 - owl-bot-staging/v1beta1/noxfile.py | 132 - .../fixup_transcoder_v1beta1_keywords.py | 183 - owl-bot-staging/v1beta1/setup.py | 54 - owl-bot-staging/v1beta1/tests/__init__.py | 16 - .../v1beta1/tests/unit/__init__.py | 16 - .../v1beta1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/transcoder_v1beta1/__init__.py | 16 - .../test_transcoder_service.py | 3243 ----------------- .../test_transcoder_service.py | 8 + 70 files changed, 176 insertions(+), 19930 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/types.rst delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py delete mode 100644 
owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py delete mode 100644 owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py delete mode 100644 owl-bot-staging/v1beta1/.coveragerc delete mode 100644 owl-bot-staging/v1beta1/MANIFEST.in delete mode 100644 owl-bot-staging/v1beta1/README.rst delete mode 100644 owl-bot-staging/v1beta1/docs/conf.py delete mode 100644 owl-bot-staging/v1beta1/docs/index.rst delete mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst delete mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst delete mode 100644 owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst delete mode 100644 
owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py delete mode 100644 owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py delete mode 100644 owl-bot-staging/v1beta1/mypy.ini delete mode 100644 owl-bot-staging/v1beta1/noxfile.py delete mode 100644 
owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py delete mode 100644 owl-bot-staging/v1beta1/setup.py delete mode 100644 owl-bot-staging/v1beta1/tests/__init__.py delete mode 100644 owl-bot-staging/v1beta1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py delete mode 100644 owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py index f416479..ceeefa0 100644 --- a/google/cloud/video/transcoder_v1beta1/types/resources.py +++ b/google/cloud/video/transcoder_v1beta1/types/resources.py @@ -57,8 +57,9 @@ class Job(proto.Message): Input only. Specify the ``input_uri`` to populate empty ``uri`` fields in each element of ``Job.config.inputs`` or ``JobTemplate.config.inputs`` when using template. URI of - the media. It must be stored in Cloud Storage. For example, - ``gs://bucket/inputs/file.mp4``. + the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). output_uri (str): Input only. Specify the ``output_uri`` to populate an empty ``Job.config.output.uri`` or @@ -83,11 +84,17 @@ class Job(proto.Message): default is 0. origin_uri (google.cloud.video.transcoder_v1beta1.types.Job.OriginUri): Output only. The origin URI. + state (google.cloud.video.transcoder_v1beta1.types.Job.ProcessingState): Output only. The current state of the job. progress (google.cloud.video.transcoder_v1beta1.types.Progress): Output only. Estimated fractional progress, from ``0`` to ``1`` for each step. + + .. raw:: html + + failure_reason (str): Output only. A description of the reason for the failure. 
This property is always present when ``state`` is @@ -96,6 +103,10 @@ class Job(proto.Message): Output only. List of failure details. This property may contain additional information about the failure when ``failure_reason`` is present. + + .. raw:: html + + create_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the job was created. start_time (google.protobuf.timestamp_pb2.Timestamp): @@ -104,6 +115,11 @@ class Job(proto.Message): end_time (google.protobuf.timestamp_pb2.Timestamp): Output only. The time the transcoding finished. + ttl_after_completion_days (int): + Job time to live value in days, which will be + effective after job completion. Job should be + deleted automatically after the given TTL. Enter + a value between 1 and 90. The default is 30. """ class ProcessingState(proto.Enum): @@ -118,9 +134,10 @@ class OriginUri(proto.Message): r"""The origin URI. Attributes: hls (str): - HLS master manifest URI. If multiple HLS - master manifests are created only first one is - listed. + HLS manifest URI per + https://tools.ietf.org/html/rfc8216#section-4.3.4. + If multiple HLS manifests are created, only the + first one is listed. dash (str): Dash manifest URI. If multiple Dash manifests are created, only the first one is listed. @@ -149,6 +166,7 @@ class OriginUri(proto.Message): ) start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp,) end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp_pb2.Timestamp,) + ttl_after_completion_days = proto.Field(proto.INT32, number=15,) class JobTemplate(proto.Message): @@ -218,9 +236,10 @@ class Input(proto.Message): specified when using advanced mapping and edit lists. uri (str): - URI of the media. It must be stored in Cloud Storage. - Example ``gs://bucket/inputs/file.mp4``. If empty the value - will be populated from ``Job.input_uri``. + URI of the media. 
Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). If empty, the value will + be populated from ``Job.input_uri``. preprocessing_config (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig): Preprocessing configurations. """ @@ -415,11 +434,21 @@ class SpriteSheet(proto.Message): suffix starting from 0 before the extension, such as ``"sprite_sheet0000000123.jpeg"``. sprite_width_pixels (int): - Required. The width of sprite in pixels. Must - be an even integer. + Required. The width of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] + field or the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] + field, but not both (the API will automatically calculate + the missing field). sprite_height_pixels (int): - Required. The height of sprite in pixels. - Must be an even integer. + Required. The height of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] + field or the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] + field, but not both (the API will automatically calculate + the missing field). column_count (int): The maximum number of sprites per row in a sprite sheet. The default is 0, which indicates @@ -444,6 +473,13 @@ class SpriteSheet(proto.Message): interval (google.protobuf.duration_pb2.Duration): Starting from ``0s``, create sprites at regular intervals. Specify the interval value in seconds. + quality (int): + The quality of the generated sprite sheet. + Enter a value between 1 and 100, where 1 is the + lowest quality and 100 is the highest quality. 
+ The default is 100. A high quality value + corresponds to a low image data compression + ratio. """ format_ = proto.Field(proto.STRING, number=1,) @@ -465,6 +501,7 @@ class SpriteSheet(proto.Message): oneof="extraction_strategy", message=duration_pb2.Duration, ) + quality = proto.Field(proto.INT32, number=11,) class Overlay(proto.Message): @@ -499,8 +536,9 @@ class Image(proto.Message): r"""Overlaid jpeg image. Attributes: uri (str): - Required. URI of the image in Cloud Storage. For example, - ``gs://bucket/inputs/image.jpeg``. + Required. URI of the JPEG image in Cloud Storage. For + example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only + supported image type. resolution (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): Normalized image resolution, based on output video resolution. Valid values: ``0.0``–``1.0``. To respect the @@ -508,8 +546,9 @@ class Image(proto.Message): ``0.0``. To use the original image resolution, set both ``x`` and ``y`` to ``0.0``. alpha (float): - Target image opacity. Valid values: ``1`` (solid, default), - ``0`` (transparent). + Target image opacity. Valid values are from ``1.0`` (solid, + default) to ``0.0`` (transparent), exclusive. Set this to a + value greater than ``0.0``. """ uri = proto.Field(proto.STRING, number=1,) @@ -524,7 +563,10 @@ class AnimationStatic(proto.Message): xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): Normalized coordinates based on output video resolution. Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. start_time_offset (google.protobuf.duration_pb2.Duration): The time to start displaying the overlay object, in seconds. 
Default: 0 @@ -546,7 +588,10 @@ class AnimationFade(proto.Message): xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): Normalized coordinates based on output video resolution. Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. start_time_offset (google.protobuf.duration_pb2.Duration): The time to start the fade animation, in seconds. Default: 0 @@ -626,6 +671,10 @@ class PreprocessingConfig(proto.Message): Deblock preprocessing configuration. audio (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Audio): Audio preprocessing configuration. + crop (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Crop): + Specify the video cropping configuration. + pad (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Pad): + Specify the video pad filter configuration. """ class Color(proto.Message): @@ -691,17 +740,19 @@ class Audio(proto.Message): r"""Audio preprocessing configuration. Attributes: lufs (float): - Specify audio loudness normalization in - loudness units relative to full scale (LUFS). - Enter a value between -24 and 0, where -24 is - the Advanced Television Systems Committee (ATSC - A/85), -23 is the EU R128 broadcast standard, - -19 is the prior standard for online mono audio, - -18 is the ReplayGain standard, -16 is the prior - standard for stereo audio, -14 is the new online - audio standard recommended by Spotify, as well - as Amazon Echo, and 0 disables normalization. - The default is 0. + Specify audio loudness normalization in loudness units + relative to full scale (LUFS). 
Enter a value between -24 and + 0 (the default), where: + + - -24 is the Advanced Television Systems Committee (ATSC + A/85) standard + - -23 is the EU R128 broadcast standard + - -19 is the prior standard for online mono audio + - -18 is the ReplayGain standard + - -16 is the prior standard for stereo audio + - -14 is the new online audio standard recommended by + Spotify, as well as Amazon Echo + - 0 disables normalization high_boost (bool): Enable boosting high frequency components. The default is ``false``. @@ -714,36 +765,95 @@ class Audio(proto.Message): high_boost = proto.Field(proto.BOOL, number=2,) low_boost = proto.Field(proto.BOOL, number=3,) + class Crop(proto.Message): + r"""Video cropping configuration for the input video. The cropped + input video is scaled to match the output resolution. + + Attributes: + top_pixels (int): + The number of pixels to crop from the top. + The default is 0. + bottom_pixels (int): + The number of pixels to crop from the bottom. + The default is 0. + left_pixels (int): + The number of pixels to crop from the left. + The default is 0. + right_pixels (int): + The number of pixels to crop from the right. + The default is 0. + """ + + top_pixels = proto.Field(proto.INT32, number=1,) + bottom_pixels = proto.Field(proto.INT32, number=2,) + left_pixels = proto.Field(proto.INT32, number=3,) + right_pixels = proto.Field(proto.INT32, number=4,) + + class Pad(proto.Message): + r"""Pad filter configuration for the input video. The padded + input video is scaled after padding with black to match the + output resolution. + + Attributes: + top_pixels (int): + The number of pixels to add to the top. The + default is 0. + bottom_pixels (int): + The number of pixels to add to the bottom. + The default is 0. + left_pixels (int): + The number of pixels to add to the left. The + default is 0. + right_pixels (int): + The number of pixels to add to the right. The + default is 0. 
+ """ + + top_pixels = proto.Field(proto.INT32, number=1,) + bottom_pixels = proto.Field(proto.INT32, number=2,) + left_pixels = proto.Field(proto.INT32, number=3,) + right_pixels = proto.Field(proto.INT32, number=4,) + color = proto.Field(proto.MESSAGE, number=1, message=Color,) denoise = proto.Field(proto.MESSAGE, number=2, message=Denoise,) deblock = proto.Field(proto.MESSAGE, number=3, message=Deblock,) audio = proto.Field(proto.MESSAGE, number=4, message=Audio,) + crop = proto.Field(proto.MESSAGE, number=5, message=Crop,) + pad = proto.Field(proto.MESSAGE, number=6, message=Pad,) class VideoStream(proto.Message): r"""Video stream resource. Attributes: codec (str): - Codec type. The default is ``"h264"``. - - Supported codecs: + Codec type. The following codecs are supported: - - 'h264' - - 'h265' - - 'vp9' + - ``h264`` (default) + - ``h265`` + - ``vp9`` profile (str): - Enforce specified codec profile. The default is ``"high"``. + Enforces the specified codec profile. The following profiles + are supported: - Supported codec profiles: + - ``baseline`` + - ``main`` + - ``high`` (default) - - 'baseline' - - 'main' - - 'high' + The available options are FFmpeg-compatible. Note that + certain values for this field may cause the transcoder to + override other fields you set in the ``VideoStream`` + message. tune (str): - Enforce specified codec tune. + Enforces the specified codec tune. The available options are + FFmpeg-compatible. Note that certain values for this field + may cause the transcoder to override other fields you set in + the ``VideoStream`` message. preset (str): - Enforce specified codec preset. The default is - ``"veryfast"``. + Enforces the specified codec preset. The default is + ``veryfast``. The available options are FFmpeg-compatible. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``VideoStream`` message. height_pixels (int): The height of the video in pixels. 
Must be an even integer. When not specified, the height is @@ -772,7 +882,9 @@ class VideoStream(proto.Message): - 'yuv444p12' 12-bit HDR pixel format. bitrate_bps (int): Required. The video bitrate in bits per - second. Must be between 1 and 1,000,000,000. + second. The minimum value is 1,000. The maximum + value for H264/H265 is 800,000,000. The maximum + value for VP9 is 480,000,000. rate_control_mode (str): Specify the ``rate_control_mode``. The default is ``"vbr"``. @@ -804,7 +916,10 @@ class VideoStream(proto.Message): frame count. Must be greater than zero. gop_duration (google.protobuf.duration_pb2.Duration): Select the GOP size based on the specified duration. The - default is ``"3s"``. + default is ``"3s"``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. entropy_coder (str): The entropy coder to use. The default is ``"cabac"``. @@ -824,30 +939,10 @@ class VideoStream(proto.Message): (FPS). Must be less than or equal to 120. Will default to the input frame rate if larger than the input frame rate. The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. - - The following table shows the computed video FPS given the - target FPS (in parenthesis) and input FPS (in the first - column): - - :: - - | | (30) | (60) | (25) | (50) | - |--------|--------|--------|------|------| - | 240 | Fail | Fail | Fail | Fail | - | 120 | 30 | 60 | 20 | 30 | - | 100 | 25 | 50 | 20 | 30 | - | 50 | 25 | 50 | 20 | 30 | - | 60 | 30 | 60 | 20 | 30 | - | 59.94 | 29.97 | 59.94 | 20 | 30 | - | 48 | 24 | 48 | 20 | 30 | - | 30 | 30 | 30 | 20 | 30 | - | 25 | 25 | 25 | 20 | 30 | - | 24 | 24 | 24 | 20 | 30 | - | 23.976 | 23.976 | 23.976 | 20 | 30 | - | 15 | 15 | 15 | 20 | 30 | - | 12 | 12 | 12 | 20 | 30 | - | 10 | 10 | 10 | 20 | 30 | + input FPS, and smaller or equal to the target FPS. 
See + `Calculate frame + rate `__ + for more information. aq_strength (float): Specify the intensity of the adaptive quantizer (AQ). Must be between 0 and 1, where 0 @@ -1051,7 +1146,10 @@ class SegmentSettings(proto.Message): Attributes: segment_duration (google.protobuf.duration_pb2.Duration): Duration of the segments in seconds. The default is - ``"6.0s"``. + ``"6.0s"``. Note that ``segmentDuration`` must be greater + than or equal to ```gopDuration`` <#videostream>`__, and + ``segmentDuration`` must be divisible by + ```gopDuration`` <#videostream>`__. individual_segments (bool): Required. Create an individual segment file. The default is ``false``. diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index 6926d19..0000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/video/transcoder/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index da1cb61..0000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/video/transcoder *.py -recursive-include google/cloud/video/transcoder_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 43621a1..0000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Video Transcoder API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Video Transcoder API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index d739f6d..0000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-video-transcoder documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-video-transcoder" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Video Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. 
If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-video-transcoder-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-video-transcoder.tex", - u"google-cloud-video-transcoder Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-video-transcoder", - u"Google Cloud Video Transcoder Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-video-transcoder", - u"google-cloud-video-transcoder Documentation", - author, - "google-cloud-video-transcoder", - "GAPIC library for Google Cloud Video Transcoder API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 0cfe564..0000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - transcoder_v1/services - transcoder_v1/types diff --git a/owl-bot-staging/v1/docs/transcoder_v1/services.rst b/owl-bot-staging/v1/docs/transcoder_v1/services.rst deleted file mode 100644 index 1bd129e..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Video Transcoder v1 API -================================================= -.. 
toctree:: - :maxdepth: 2 - - transcoder_service diff --git a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst deleted file mode 100644 index 5bf6bd8..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TranscoderService ------------------------------------ - -.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service - :members: - :inherited-members: - -.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/transcoder_v1/types.rst b/owl-bot-staging/v1/docs/transcoder_v1/types.rst deleted file mode 100644 index 7dc3c71..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Video Transcoder v1 API -============================================== - -.. automodule:: google.cloud.video.transcoder_v1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py deleted file mode 100644 index c203235..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py +++ /dev/null @@ -1,81 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.video.transcoder_v1.services.transcoder_service.client import TranscoderServiceClient -from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import TranscoderServiceAsyncClient - -from google.cloud.video.transcoder_v1.types.resources import AdBreak -from google.cloud.video.transcoder_v1.types.resources import AudioStream -from google.cloud.video.transcoder_v1.types.resources import EditAtom -from google.cloud.video.transcoder_v1.types.resources import ElementaryStream -from google.cloud.video.transcoder_v1.types.resources import Encryption -from google.cloud.video.transcoder_v1.types.resources import Input -from google.cloud.video.transcoder_v1.types.resources import Job -from google.cloud.video.transcoder_v1.types.resources import JobConfig -from google.cloud.video.transcoder_v1.types.resources import JobTemplate -from google.cloud.video.transcoder_v1.types.resources import Manifest -from google.cloud.video.transcoder_v1.types.resources import MuxStream -from google.cloud.video.transcoder_v1.types.resources import Output -from google.cloud.video.transcoder_v1.types.resources import Overlay -from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig -from google.cloud.video.transcoder_v1.types.resources import PubsubDestination -from google.cloud.video.transcoder_v1.types.resources import SegmentSettings -from google.cloud.video.transcoder_v1.types.resources import SpriteSheet -from google.cloud.video.transcoder_v1.types.resources import TextStream -from google.cloud.video.transcoder_v1.types.resources import VideoStream -from google.cloud.video.transcoder_v1.types.services import CreateJobRequest -from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services 
import GetJobRequest -from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsResponse -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse - -__all__ = ('TranscoderServiceClient', - 'TranscoderServiceAsyncClient', - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Encryption', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py deleted file mode 100644 index d9d6f49..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.transcoder_service import TranscoderServiceClient -from .services.transcoder_service import TranscoderServiceAsyncClient - -from .types.resources import AdBreak -from .types.resources import AudioStream -from .types.resources import EditAtom -from .types.resources import ElementaryStream -from .types.resources import Encryption -from .types.resources import Input -from .types.resources import Job -from .types.resources import JobConfig -from .types.resources import JobTemplate -from .types.resources import Manifest -from .types.resources import MuxStream -from .types.resources import Output -from .types.resources import Overlay -from .types.resources import PreprocessingConfig -from .types.resources import PubsubDestination -from .types.resources import SegmentSettings -from .types.resources import SpriteSheet -from .types.resources import TextStream -from .types.resources import VideoStream -from .types.services import CreateJobRequest -from .types.services import CreateJobTemplateRequest -from .types.services import DeleteJobRequest -from .types.services import DeleteJobTemplateRequest -from .types.services import GetJobRequest -from .types.services import GetJobTemplateRequest -from .types.services import ListJobsRequest -from .types.services import ListJobsResponse -from .types.services import ListJobTemplatesRequest -from .types.services import ListJobTemplatesResponse - -__all__ = ( - 'TranscoderServiceAsyncClient', -'AdBreak', -'AudioStream', -'CreateJobRequest', -'CreateJobTemplateRequest', -'DeleteJobRequest', 
-'DeleteJobTemplateRequest', -'EditAtom', -'ElementaryStream', -'Encryption', -'GetJobRequest', -'GetJobTemplateRequest', -'Input', -'Job', -'JobConfig', -'JobTemplate', -'ListJobTemplatesRequest', -'ListJobTemplatesResponse', -'ListJobsRequest', -'ListJobsResponse', -'Manifest', -'MuxStream', -'Output', -'Overlay', -'PreprocessingConfig', -'PubsubDestination', -'SegmentSettings', -'SpriteSheet', -'TextStream', -'TranscoderServiceClient', -'VideoStream', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json deleted file mode 100644 index 6651379..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.video.transcoder_v1", - "protoPackage": "google.cloud.video.transcoder.v1", - "schema": "1.0", - "services": { - "TranscoderService": { - "clients": { - "grpc": { - "libraryClient": "TranscoderServiceClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "grpc-async": { - "libraryClient": "TranscoderServiceAsyncClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] 
- }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py deleted file mode 100644 index 1688786..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import TranscoderServiceClient -from .async_client import TranscoderServiceAsyncClient - -__all__ = ( - 'TranscoderServiceClient', - 'TranscoderServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py deleted file mode 100644 index 65d7b8e..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ /dev/null @@ -1,810 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport -from .client import TranscoderServiceClient - - -class TranscoderServiceAsyncClient: - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. 
- """ - - _client: TranscoderServiceClient - - DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT - - job_path = staticmethod(TranscoderServiceClient.job_path) - parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) - job_template_path = staticmethod(TranscoderServiceClient.job_template_path) - parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) - common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) - common_project_path = staticmethod(TranscoderServiceClient.common_project_path) - parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) - common_location_path = staticmethod(TranscoderServiceClient.common_location_path) - parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. 
- """ - return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. - """ - return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. 
It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = TranscoderServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_job(self, - request: services.CreateJobRequest = None, - *, - parent: str = None, - job: resources.Job = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.CreateJobRequest`): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (:class:`str`): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job (:class:`google.cloud.video.transcoder_v1.types.Job`): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_jobs(self, - request: services.ListJobsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""Lists jobs in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.ListJobsRequest`): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (:class:`str`): - Required. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: services.GetJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.GetJobRequest`): - The request object. Request message for - `TranscoderService.GetJob`. - name (:class:`str`): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: services.DeleteJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobRequest`): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (:class:`str`): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_template(self, - request: services.CreateJobTemplateRequest = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest`): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (:class:`str`): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (:class:`google.cloud.video.transcoder_v1.types.JobTemplate`): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (:class:`str`): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_templates(self, - request: services.ListJobTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesAsyncPager: - r"""Lists job templates in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest`): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (:class:`str`): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: - Response message for TranscoderService.ListJobTemplates. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_templates, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_template(self, - request: services.GetJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.GetJobTemplateRequest`): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (:class:`str`): - Required. The name of the job template to retrieve. 
- Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_job_template(self, - request: services.DeleteJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (:class:`google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest`): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (:class:`str`): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "TranscoderServiceAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py deleted file mode 100644 index 3f4470b..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ /dev/null @@ -1,1010 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import TranscoderServiceGrpcTransport -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -class TranscoderServiceClientMeta(type): - """Metaclass for the TranscoderService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] - _transport_registry["grpc"] = TranscoderServiceGrpcTransport - _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[TranscoderServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta): - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "transcoder.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def job_path(project: str,location: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_template_path(project: str,location: str,job_template: str,) -> str: - """Returns a fully-qualified job_template string.""" - return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - - @staticmethod - def parse_job_template_path(path: str) -> Dict[str,str]: - """Parses a job_template path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - 
"""Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, TranscoderServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, TranscoderServiceTransport): - # transport is a TranscoderServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def create_job(self, - request: services.CreateJobRequest = None, - *, - parent: str = None, - job: resources.Job = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (google.cloud.video.transcoder_v1.types.CreateJobRequest): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.video.transcoder_v1.types.Job): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobRequest): - request = services.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: services.ListJobsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""Lists jobs in the specified region. - - Args: - request (google.cloud.video.transcoder_v1.types.ListJobsRequest): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (str): - Required. 
Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobsRequest): - request = services.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: services.GetJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (google.cloud.video.transcoder_v1.types.GetJobRequest): - The request object. Request message for - `TranscoderService.GetJob`. - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, services.GetJobRequest): - request = services.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: services.DeleteJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (google.cloud.video.transcoder_v1.types.DeleteJobRequest): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobRequest): - request = services.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_template(self, - request: services.CreateJobTemplateRequest = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (str): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (google.cloud.video.transcoder_v1.types.JobTemplate): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobTemplateRequest): - request = services.CreateJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_templates(self, - request: services.ListJobTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesPager: - r"""Lists job templates in the specified region. - - Args: - request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager: - Response message for TranscoderService.ListJobTemplates. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobTemplatesRequest): - request = services.ListJobTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_template(self, - request: services.GetJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (google.cloud.video.transcoder_v1.types.GetJobTemplateRequest): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (str): - Required. The name of the job template to retrieve. - Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.GetJobTemplateRequest): - request = services.GetJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job_template(self, - request: services.DeleteJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobTemplateRequest): - request = services.DeleteJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "TranscoderServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py deleted file mode 100644 index 0987d76..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobsResponse], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[resources.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobsResponse]], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[resources.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobTemplatesResponse], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[resources.JobTemplate]: - for page in self.pages: - yield from page.job_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesAsyncPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[resources.JobTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.job_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py deleted file mode 100644 index 5ed2b9a..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# 
Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import TranscoderServiceTransport -from .grpc import TranscoderServiceGrpcTransport -from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] -_transport_registry['grpc'] = TranscoderServiceGrpcTransport -_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport - -__all__ = ( - 'TranscoderServiceTransport', - 'TranscoderServiceGrpcTransport', - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py deleted file mode 100644 index 68e3d92..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py +++ /dev/null @@ -1,268 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-video-transcoder', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class TranscoderServiceTransport(abc.ABC): - """Abstract transport class for TranscoderService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'transcoder.googleapis.com' - def __init__( - self, *, - 
host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_job_template: gapic_v1.method.wrap_method( - self.create_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.list_job_templates: gapic_v1.method.wrap_method( - self.list_job_templates, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job_template: gapic_v1.method.wrap_method( - self.get_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job_template: gapic_v1.method.wrap_method( - self.delete_job_template, - default_timeout=60.0, - client_info=client_info, - ), - } - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Union[ - services.ListJobsResponse, - Awaitable[services.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def 
list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Union[ - services.ListJobTemplatesResponse, - Awaitable[services.ListJobTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'TranscoderServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py deleted file mode 100644 index a40fae1..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py +++ /dev/null @@ -1,442 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO - - -class TranscoderServiceGrpcTransport(TranscoderServiceTransport): - """gRPC backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - resources.Job]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - services.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - resources.Job]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - services.ListJobTemplatesResponse]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - ~.ListJobTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - -__all__ = ( - 'TranscoderServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py deleted file mode 100644 index 7eef79b..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import TranscoderServiceGrpcTransport - - -class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): - """gRPC AsyncIO backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the create job method over gRPC. 
- - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Awaitable[services.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Awaitable[services.ListJobTemplatesResponse]]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - Awaitable[~.ListJobTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - -__all__ = ( - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py deleted file mode 100644 index 51231fa..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py +++ /dev/null @@ -1,80 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .resources import ( - AdBreak, - AudioStream, - EditAtom, - ElementaryStream, - Encryption, - Input, - Job, - JobConfig, - JobTemplate, - Manifest, - MuxStream, - Output, - Overlay, - PreprocessingConfig, - PubsubDestination, - SegmentSettings, - SpriteSheet, - TextStream, - VideoStream, -) -from .services import ( - CreateJobRequest, - CreateJobTemplateRequest, - DeleteJobRequest, - DeleteJobTemplateRequest, - GetJobRequest, - GetJobTemplateRequest, - ListJobsRequest, - ListJobsResponse, - ListJobTemplatesRequest, - ListJobTemplatesResponse, -) - -__all__ = ( - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Encryption', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py deleted file mode 100644 index a3130a6..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py +++ /dev/null @@ -1,1942 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you 
may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1', - manifest={ - 'Job', - 'JobTemplate', - 'JobConfig', - 'Input', - 'Output', - 'EditAtom', - 'AdBreak', - 'ElementaryStream', - 'MuxStream', - 'Manifest', - 'PubsubDestination', - 'SpriteSheet', - 'Overlay', - 'PreprocessingConfig', - 'VideoStream', - 'AudioStream', - 'TextStream', - 'SegmentSettings', - 'Encryption', - }, -) - - -class Job(proto.Message): - r"""Transcoding job resource. - Attributes: - name (str): - The resource name of the job. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - input_uri (str): - Input only. Specify the ``input_uri`` to populate empty - ``uri`` fields in each element of ``Job.config.inputs`` or - ``JobTemplate.config.inputs`` when using template. URI of - the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). - output_uri (str): - Input only. Specify the ``output_uri`` to populate an empty - ``Job.config.output.uri`` or - ``JobTemplate.config.output.uri`` when using template. URI - for the output file(s). For example, - ``gs://my-bucket/outputs/``. - template_id (str): - Input only. Specify the ``template_id`` to use for - populating ``Job.config``. The default is ``preset/web-hd``. 
- - Preset Transcoder templates: - - - ``preset/{preset_id}`` - - - User defined JobTemplate: ``{job_template_id}`` - config (google.cloud.video.transcoder_v1.types.JobConfig): - The configuration for this job. - state (google.cloud.video.transcoder_v1.types.Job.ProcessingState): - Output only. The current state of the job. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - finished. - ttl_after_completion_days (int): - Job time to live value in days, which will be - effective after job completion. Job should be - deleted automatically after the given TTL. Enter - a value between 1 and 90. The default is 30. - error (google.rpc.status_pb2.Status): - Output only. An error object that describes the reason for - the failure. This property is always present when ``state`` - is ``FAILED``. 
- """ - class ProcessingState(proto.Enum): - r"""The current state of the job.""" - PROCESSING_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - SUCCEEDED = 3 - FAILED = 4 - - name = proto.Field( - proto.STRING, - number=1, - ) - input_uri = proto.Field( - proto.STRING, - number=2, - ) - output_uri = proto.Field( - proto.STRING, - number=3, - ) - template_id = proto.Field( - proto.STRING, - number=4, - oneof='job_config', - ) - config = proto.Field( - proto.MESSAGE, - number=5, - oneof='job_config', - message='JobConfig', - ) - state = proto.Field( - proto.ENUM, - number=8, - enum=ProcessingState, - ) - create_time = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - ttl_after_completion_days = proto.Field( - proto.INT32, - number=15, - ) - error = proto.Field( - proto.MESSAGE, - number=17, - message=status_pb2.Status, - ) - - -class JobTemplate(proto.Message): - r"""Transcoding job template resource. - Attributes: - name (str): - The resource name of the job template. Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - config (google.cloud.video.transcoder_v1.types.JobConfig): - The configuration for this template. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - config = proto.Field( - proto.MESSAGE, - number=2, - message='JobConfig', - ) - - -class JobConfig(proto.Message): - r"""Job configuration - Attributes: - inputs (Sequence[google.cloud.video.transcoder_v1.types.Input]): - List of input assets stored in Cloud Storage. - edit_list (Sequence[google.cloud.video.transcoder_v1.types.EditAtom]): - List of ``Edit atom``\ s. Defines the ultimate timeline of - the resulting file or manifest. 
- elementary_streams (Sequence[google.cloud.video.transcoder_v1.types.ElementaryStream]): - List of elementary streams. - mux_streams (Sequence[google.cloud.video.transcoder_v1.types.MuxStream]): - List of multiplexing settings for output - streams. - manifests (Sequence[google.cloud.video.transcoder_v1.types.Manifest]): - List of output manifests. - output (google.cloud.video.transcoder_v1.types.Output): - Output configuration. - ad_breaks (Sequence[google.cloud.video.transcoder_v1.types.AdBreak]): - List of ad breaks. Specifies where to insert - ad break tags in the output manifests. - pubsub_destination (google.cloud.video.transcoder_v1.types.PubsubDestination): - Destination on Pub/Sub. - sprite_sheets (Sequence[google.cloud.video.transcoder_v1.types.SpriteSheet]): - List of output sprite sheets. - overlays (Sequence[google.cloud.video.transcoder_v1.types.Overlay]): - List of overlays on the output video, in - descending Z-order. - """ - - inputs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Input', - ) - edit_list = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='EditAtom', - ) - elementary_streams = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ElementaryStream', - ) - mux_streams = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='MuxStream', - ) - manifests = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='Manifest', - ) - output = proto.Field( - proto.MESSAGE, - number=6, - message='Output', - ) - ad_breaks = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='AdBreak', - ) - pubsub_destination = proto.Field( - proto.MESSAGE, - number=8, - message='PubsubDestination', - ) - sprite_sheets = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='SpriteSheet', - ) - overlays = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='Overlay', - ) - - -class Input(proto.Message): - r"""Input asset. - Attributes: - key (str): - A unique key for this input. 
Must be - specified when using advanced mapping and edit - lists. - uri (str): - URI of the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). If empty, the value will - be populated from ``Job.input_uri``. - preprocessing_config (google.cloud.video.transcoder_v1.types.PreprocessingConfig): - Preprocessing configurations. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - uri = proto.Field( - proto.STRING, - number=2, - ) - preprocessing_config = proto.Field( - proto.MESSAGE, - number=3, - message='PreprocessingConfig', - ) - - -class Output(proto.Message): - r"""Location of output file(s) in a Cloud Storage bucket. - Attributes: - uri (str): - URI for the output file(s). For example, - ``gs://my-bucket/outputs/``. If empty the value is populated - from ``Job.output_uri``. - """ - - uri = proto.Field( - proto.STRING, - number=1, - ) - - -class EditAtom(proto.Message): - r"""Edit atom. - Attributes: - key (str): - A unique key for this atom. Must be specified - when using advanced mapping. - inputs (Sequence[str]): - List of ``Input.key``\ s identifying files that should be - used in this atom. The listed ``inputs`` must have the same - timeline. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds for the atom, relative to the input file - timeline. When ``end_time_offset`` is not specified, the - ``inputs`` are used until the end of the atom. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the atom, relative to the input - file timeline. The default is ``0s``. 
- """ - - key = proto.Field( - proto.STRING, - number=1, - ) - inputs = proto.RepeatedField( - proto.STRING, - number=2, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - -class AdBreak(proto.Message): - r"""Ad break. - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the ad break, relative to the - output file timeline. The default is ``0s``. - """ - - start_time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - -class ElementaryStream(proto.Message): - r"""Encoding of an input file such as an audio, video, or text - track. Elementary streams must be packaged before - mapping and sharing between different output formats. - - Attributes: - key (str): - A unique key for this elementary stream. - video_stream (google.cloud.video.transcoder_v1.types.VideoStream): - Encoding of a video stream. - audio_stream (google.cloud.video.transcoder_v1.types.AudioStream): - Encoding of an audio stream. - text_stream (google.cloud.video.transcoder_v1.types.TextStream): - Encoding of a text stream. For example, - closed captions or subtitles. - """ - - key = proto.Field( - proto.STRING, - number=4, - ) - video_stream = proto.Field( - proto.MESSAGE, - number=1, - oneof='elementary_stream', - message='VideoStream', - ) - audio_stream = proto.Field( - proto.MESSAGE, - number=2, - oneof='elementary_stream', - message='AudioStream', - ) - text_stream = proto.Field( - proto.MESSAGE, - number=3, - oneof='elementary_stream', - message='TextStream', - ) - - -class MuxStream(proto.Message): - r"""Multiplexing settings for output stream. - Attributes: - key (str): - A unique key for this multiplexed stream. HLS media - manifests will be named ``MuxStream.key`` with the - ``".m3u8"`` extension suffix. 
- file_name (str): - The name of the generated file. The default is - ``MuxStream.key`` with the extension suffix corresponding to - the ``MuxStream.container``. - - Individual segments also have an incremental 10-digit - zero-padded suffix starting from 0 before the extension, - such as ``"mux_stream0000000123.ts"``. - container (str): - The container format. The default is ``"mp4"`` - - Supported container formats: - - - 'ts' - - 'fmp4'- the corresponding file extension is ``".m4s"`` - - 'mp4' - - 'vtt' - elementary_streams (Sequence[str]): - List of ``ElementaryStream.key``\ s multiplexed in this - stream. - segment_settings (google.cloud.video.transcoder_v1.types.SegmentSettings): - Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - encryption (google.cloud.video.transcoder_v1.types.Encryption): - Encryption settings. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - file_name = proto.Field( - proto.STRING, - number=2, - ) - container = proto.Field( - proto.STRING, - number=3, - ) - elementary_streams = proto.RepeatedField( - proto.STRING, - number=4, - ) - segment_settings = proto.Field( - proto.MESSAGE, - number=5, - message='SegmentSettings', - ) - encryption = proto.Field( - proto.MESSAGE, - number=6, - message='Encryption', - ) - - -class Manifest(proto.Message): - r"""Manifest configuration. - Attributes: - file_name (str): - The name of the generated file. The default is - ``"manifest"`` with the extension suffix corresponding to - the ``Manifest.type``. - type_ (google.cloud.video.transcoder_v1.types.Manifest.ManifestType): - Required. Type of the manifest, can be "HLS" - or "DASH". - mux_streams (Sequence[str]): - Required. List of user given ``MuxStream.key``\ s that - should appear in this manifest. - - When ``Manifest.type`` is ``HLS``, a media manifest with - name ``MuxStream.key`` and ``.m3u8`` extension is generated - for each element of the ``Manifest.mux_streams``. 
- """ - class ManifestType(proto.Enum): - r"""The manifest type can be either ``"HLS"`` or ``"DASH"``.""" - MANIFEST_TYPE_UNSPECIFIED = 0 - HLS = 1 - DASH = 2 - - file_name = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.ENUM, - number=2, - enum=ManifestType, - ) - mux_streams = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class PubsubDestination(proto.Message): - r"""A Pub/Sub destination. - Attributes: - topic (str): - The name of the Pub/Sub topic to publish job completion - notification to. For example: - ``projects/{project}/topics/{topic}``. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - - -class SpriteSheet(proto.Message): - r"""Sprite sheet configuration. - Attributes: - format_ (str): - Format type. The default is ``"jpeg"``. - - Supported formats: - - - 'jpeg' - file_prefix (str): - Required. File name prefix for the generated sprite sheets. - - Each sprite sheet has an incremental 10-digit zero-padded - suffix starting from 0 before the extension, such as - ``"sprite_sheet0000000123.jpeg"``. - sprite_width_pixels (int): - Required. The width of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] - field or the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] - field, but not both (the API will automatically calculate - the missing field). - sprite_height_pixels (int): - Required. The height of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] - field or the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] - field, but not both (the API will automatically calculate - the missing field). 
- column_count (int): - The maximum number of sprites per row in a - sprite sheet. The default is 0, which indicates - no maximum limit. - row_count (int): - The maximum number of rows per sprite sheet. - When the sprite sheet is full, a new sprite - sheet is created. The default is 0, which - indicates no maximum limit. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds, relative to the output file timeline. - Determines the first sprite to pick. The default is ``0s``. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds, relative to the output file timeline. - When ``end_time_offset`` is not specified, the sprites are - generated until the end of the output file. - total_count (int): - Total number of sprites. Create the specified - number of sprites distributed evenly across the - timeline of the output media. The default is - 100. - interval (google.protobuf.duration_pb2.Duration): - Starting from ``0s``, create sprites at regular intervals. - Specify the interval value in seconds. - quality (int): - The quality of the generated sprite sheet. - Enter a value between 1 and 100, where 1 is the - lowest quality and 100 is the highest quality. - The default is 100. A high quality value - corresponds to a low image data compression - ratio. 
- """ - - format_ = proto.Field( - proto.STRING, - number=1, - ) - file_prefix = proto.Field( - proto.STRING, - number=2, - ) - sprite_width_pixels = proto.Field( - proto.INT32, - number=3, - ) - sprite_height_pixels = proto.Field( - proto.INT32, - number=4, - ) - column_count = proto.Field( - proto.INT32, - number=5, - ) - row_count = proto.Field( - proto.INT32, - number=6, - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - total_count = proto.Field( - proto.INT32, - number=9, - oneof='extraction_strategy', - ) - interval = proto.Field( - proto.MESSAGE, - number=10, - oneof='extraction_strategy', - message=duration_pb2.Duration, - ) - quality = proto.Field( - proto.INT32, - number=11, - ) - - -class Overlay(proto.Message): - r"""Overlay configuration. - Attributes: - image (google.cloud.video.transcoder_v1.types.Overlay.Image): - Image overlay. - animations (Sequence[google.cloud.video.transcoder_v1.types.Overlay.Animation]): - List of Animations. The list should be - chronological, without any time overlap. - """ - class FadeType(proto.Enum): - r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" - FADE_TYPE_UNSPECIFIED = 0 - FADE_IN = 1 - FADE_OUT = 2 - - class NormalizedCoordinate(proto.Message): - r"""2D normalized coordinates. Default: ``{0.0, 0.0}`` - Attributes: - x (float): - Normalized x coordinate. - y (float): - Normalized y coordinate. - """ - - x = proto.Field( - proto.DOUBLE, - number=1, - ) - y = proto.Field( - proto.DOUBLE, - number=2, - ) - - class Image(proto.Message): - r"""Overlaid jpeg image. - Attributes: - uri (str): - Required. URI of the JPEG image in Cloud Storage. For - example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only - supported image type. 
- resolution (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized image resolution, based on output video - resolution. Valid values: ``0.0``–``1.0``. To respect the - original image aspect ratio, set either ``x`` or ``y`` to - ``0.0``. To use the original image resolution, set both - ``x`` and ``y`` to ``0.0``. - alpha (float): - Target image opacity. Valid values are from ``1.0`` (solid, - default) to ``0.0`` (transparent), exclusive. Set this to a - value greater than ``0.0``. - """ - - uri = proto.Field( - proto.STRING, - number=1, - ) - resolution = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - alpha = proto.Field( - proto.DOUBLE, - number=3, - ) - - class AnimationStatic(proto.Message): - r"""Display static overlay object. - Attributes: - xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start displaying the overlay - object, in seconds. Default: 0 - """ - - xy = proto.Field( - proto.MESSAGE, - number=1, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - class AnimationFade(proto.Message): - r"""Display overlay object with fade animation. - Attributes: - fade_type (google.cloud.video.transcoder_v1.types.Overlay.FadeType): - Required. Type of fade animation: ``FADE_IN`` or - ``FADE_OUT``. - xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. 
``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start the fade animation, in - seconds. Default: 0 - end_time_offset (google.protobuf.duration_pb2.Duration): - The time to end the fade animation, in seconds. Default: - ``start_time_offset`` + 1s - """ - - fade_type = proto.Field( - proto.ENUM, - number=1, - enum='Overlay.FadeType', - ) - xy = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - class AnimationEnd(proto.Message): - r"""End previous overlay animation from the video. Without - AnimationEnd, the overlay object will keep the state of previous - animation until the end of the video. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to end overlay object, in seconds. - Default: 0 - """ - - start_time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - class Animation(proto.Message): - r"""Animation types. - Attributes: - animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic): - Display static overlay object. - animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade): - Display overlay object with fade animation. - animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd): - End previous animation. 
- """ - - animation_static = proto.Field( - proto.MESSAGE, - number=1, - oneof='animation_type', - message='Overlay.AnimationStatic', - ) - animation_fade = proto.Field( - proto.MESSAGE, - number=2, - oneof='animation_type', - message='Overlay.AnimationFade', - ) - animation_end = proto.Field( - proto.MESSAGE, - number=3, - oneof='animation_type', - message='Overlay.AnimationEnd', - ) - - image = proto.Field( - proto.MESSAGE, - number=1, - message=Image, - ) - animations = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Animation, - ) - - -class PreprocessingConfig(proto.Message): - r"""Preprocessing configurations. - Attributes: - color (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Color): - Color preprocessing configuration. - denoise (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Denoise): - Denoise preprocessing configuration. - deblock (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deblock): - Deblock preprocessing configuration. - audio (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Audio): - Audio preprocessing configuration. - crop (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Crop): - Specify the video cropping configuration. - pad (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Pad): - Specify the video pad filter configuration. - """ - - class Color(proto.Message): - r"""Color preprocessing configuration. - Attributes: - saturation (float): - Control color saturation of the video. Enter - a value between -1 and 1, where -1 is fully - desaturated and 1 is maximum saturation. 0 is no - change. The default is 0. - contrast (float): - Control black and white contrast of the - video. Enter a value between -1 and 1, where -1 - is minimum contrast and 1 is maximum contrast. 0 - is no change. The default is 0. - brightness (float): - Control brightness of the video. Enter a - value between -1 and 1, where -1 is minimum - brightness and 1 is maximum brightness. 
0 is no - change. The default is 0. - """ - - saturation = proto.Field( - proto.DOUBLE, - number=1, - ) - contrast = proto.Field( - proto.DOUBLE, - number=2, - ) - brightness = proto.Field( - proto.DOUBLE, - number=3, - ) - - class Denoise(proto.Message): - r"""Denoise preprocessing configuration. - Attributes: - strength (float): - Set strength of the denoise. Enter a value - between 0 and 1. The higher the value, the - smoother the image. 0 is no denoising. The - default is 0. - tune (str): - Set the denoiser mode. The default is ``"standard"``. - - Supported denoiser modes: - - - 'standard' - - 'grain' - """ - - strength = proto.Field( - proto.DOUBLE, - number=1, - ) - tune = proto.Field( - proto.STRING, - number=2, - ) - - class Deblock(proto.Message): - r"""Deblock preprocessing configuration. - Attributes: - strength (float): - Set strength of the deblocker. Enter a value - between 0 and 1. The higher the value, the - stronger the block removal. 0 is no deblocking. - The default is 0. - enabled (bool): - Enable deblocker. The default is ``false``. - """ - - strength = proto.Field( - proto.DOUBLE, - number=1, - ) - enabled = proto.Field( - proto.BOOL, - number=2, - ) - - class Audio(proto.Message): - r"""Audio preprocessing configuration. - Attributes: - lufs (float): - Specify audio loudness normalization in loudness units - relative to full scale (LUFS). Enter a value between -24 and - 0 (the default), where: - - - -24 is the Advanced Television Systems Committee (ATSC - A/85) standard - - -23 is the EU R128 broadcast standard - - -19 is the prior standard for online mono audio - - -18 is the ReplayGain standard - - -16 is the prior standard for stereo audio - - -14 is the new online audio standard recommended by - Spotify, as well as Amazon Echo - - 0 disables normalization - high_boost (bool): - Enable boosting high frequency components. The default is - ``false``. - low_boost (bool): - Enable boosting low frequency components. The default is - ``false``. 
- """ - - lufs = proto.Field( - proto.DOUBLE, - number=1, - ) - high_boost = proto.Field( - proto.BOOL, - number=2, - ) - low_boost = proto.Field( - proto.BOOL, - number=3, - ) - - class Crop(proto.Message): - r"""Video cropping configuration for the input video. The cropped - input video is scaled to match the output resolution. - - Attributes: - top_pixels (int): - The number of pixels to crop from the top. - The default is 0. - bottom_pixels (int): - The number of pixels to crop from the bottom. - The default is 0. - left_pixels (int): - The number of pixels to crop from the left. - The default is 0. - right_pixels (int): - The number of pixels to crop from the right. - The default is 0. - """ - - top_pixels = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels = proto.Field( - proto.INT32, - number=2, - ) - left_pixels = proto.Field( - proto.INT32, - number=3, - ) - right_pixels = proto.Field( - proto.INT32, - number=4, - ) - - class Pad(proto.Message): - r"""Pad filter configuration for the input video. The padded - input video is scaled after padding with black to match the - output resolution. - - Attributes: - top_pixels (int): - The number of pixels to add to the top. The - default is 0. - bottom_pixels (int): - The number of pixels to add to the bottom. - The default is 0. - left_pixels (int): - The number of pixels to add to the left. The - default is 0. - right_pixels (int): - The number of pixels to add to the right. The - default is 0. 
- """ - - top_pixels = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels = proto.Field( - proto.INT32, - number=2, - ) - left_pixels = proto.Field( - proto.INT32, - number=3, - ) - right_pixels = proto.Field( - proto.INT32, - number=4, - ) - - color = proto.Field( - proto.MESSAGE, - number=1, - message=Color, - ) - denoise = proto.Field( - proto.MESSAGE, - number=2, - message=Denoise, - ) - deblock = proto.Field( - proto.MESSAGE, - number=3, - message=Deblock, - ) - audio = proto.Field( - proto.MESSAGE, - number=4, - message=Audio, - ) - crop = proto.Field( - proto.MESSAGE, - number=5, - message=Crop, - ) - pad = proto.Field( - proto.MESSAGE, - number=6, - message=Pad, - ) - - -class VideoStream(proto.Message): - r"""Video stream resource. - Attributes: - h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings): - H264 codec settings. - h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings): - H265 codec settings. - vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings): - VP9 codec settings. - """ - - class H264CodecSettings(proto.Message): - r"""H264 codec settings. - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. 
See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. Must be between 1 and 1,000,000,000. - pixel_format (str): - Pixel format to use. The default is ``"yuv420p"``. - - Supported pixel formats: - - - 'yuv420p' pixel format. - - 'yuv422p' pixel format. - - 'yuv444p' pixel format. - - 'yuv420p10' 10-bit HDR pixel format. - - 'yuv422p10' 10-bit HDR pixel format. - - 'yuv444p10' 10-bit HDR pixel format. - - 'yuv420p12' 12-bit HDR pixel format. - - 'yuv422p12' 12-bit HDR pixel format. - - 'yuv444p12' 12-bit HDR pixel format. - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``"vbr"``. - - Supported rate control modes: - - - 'vbr' - variable bitrate - - 'crf' - constant rate factor - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``"3s"``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be - ``"vbr"``. The default is ``false``. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. - Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. 
Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. - entropy_coder (str): - The entropy coder to use. The default is ``"cabac"``. - - Supported entropy coders: - - - 'cavlc' - - 'cabac' - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - ``baseline`` - - ``main`` - - ``high`` (default) - - The available options are - `FFmpeg-compatible `__\ {: - class="external" }. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H264CodecSettings`` message. - tune (str): - Enforces the specified codec tune. The available options are - `FFmpeg-compatible `__\ {: - class="external" }. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H264CodecSettings`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. The available options are - `FFmpeg-compatible `__\ {: - class="external" }. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H264CodecSettings`` message. 
- """ - - width_pixels = proto.Field( - proto.INT32, - number=1, - ) - height_pixels = proto.Field( - proto.INT32, - number=2, - ) - frame_rate = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=4, - ) - pixel_format = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode = proto.Field( - proto.STRING, - number=6, - ) - crf_level = proto.Field( - proto.INT32, - number=7, - ) - allow_open_gop = proto.Field( - proto.BOOL, - number=8, - ) - gop_frame_count = proto.Field( - proto.INT32, - number=9, - oneof='gop_mode', - ) - gop_duration = proto.Field( - proto.MESSAGE, - number=10, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - enable_two_pass = proto.Field( - proto.BOOL, - number=11, - ) - vbv_size_bits = proto.Field( - proto.INT32, - number=12, - ) - vbv_fullness_bits = proto.Field( - proto.INT32, - number=13, - ) - entropy_coder = proto.Field( - proto.STRING, - number=14, - ) - b_pyramid = proto.Field( - proto.BOOL, - number=15, - ) - b_frame_count = proto.Field( - proto.INT32, - number=16, - ) - aq_strength = proto.Field( - proto.DOUBLE, - number=17, - ) - profile = proto.Field( - proto.STRING, - number=18, - ) - tune = proto.Field( - proto.STRING, - number=19, - ) - preset = proto.Field( - proto.STRING, - number=20, - ) - - class H265CodecSettings(proto.Message): - r"""H265 codec settings. - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. 
Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. Must be between 1 and 1,000,000,000. - pixel_format (str): - Pixel format to use. The default is ``"yuv420p"``. - - Supported pixel formats: - - - 'yuv420p' pixel format. - - 'yuv422p' pixel format. - - 'yuv444p' pixel format. - - 'yuv420p10' 10-bit HDR pixel format. - - 'yuv422p10' 10-bit HDR pixel format. - - 'yuv444p10' 10-bit HDR pixel format. - - 'yuv420p12' 12-bit HDR pixel format. - - 'yuv422p12' 12-bit HDR pixel format. - - 'yuv444p12' 12-bit HDR pixel format. - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``"vbr"``. - - Supported rate control modes: - - - 'vbr' - variable bitrate - - 'crf' - constant rate factor - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``"3s"``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be - ``"vbr"``. The default is ``false``. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. 
- Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - 8bit profiles - - - ``main`` (default) - - ``main-intra`` - - ``mainstillpicture`` - - 10bit profiles - - - ``main10`` (default) - - ``main10-intra`` - - ``main422-10`` - - ``main422-10-intra`` - - ``main444-10`` - - ``main444-10-intra`` - - 12bit profiles - - - ``main12`` (default) - - ``main12-intra`` - - ``main422-12`` - - ``main422-12-intra`` - - ``main444-12`` - - ``main444-12-intra`` - - The available options are - `FFmpeg-compatible `__\ {: - class="external" }. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H265CodecSettings`` message. - tune (str): - Enforces the specified codec tune. The available options are - `FFmpeg-compatible `__\ {: - class="external" }. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H265CodecSettings`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. The available options are - `FFmpeg-compatible `__\ {: - class="external" }. 
Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``H265CodecSettings`` message. - """ - - width_pixels = proto.Field( - proto.INT32, - number=1, - ) - height_pixels = proto.Field( - proto.INT32, - number=2, - ) - frame_rate = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=4, - ) - pixel_format = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode = proto.Field( - proto.STRING, - number=6, - ) - crf_level = proto.Field( - proto.INT32, - number=7, - ) - allow_open_gop = proto.Field( - proto.BOOL, - number=8, - ) - gop_frame_count = proto.Field( - proto.INT32, - number=9, - oneof='gop_mode', - ) - gop_duration = proto.Field( - proto.MESSAGE, - number=10, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - enable_two_pass = proto.Field( - proto.BOOL, - number=11, - ) - vbv_size_bits = proto.Field( - proto.INT32, - number=12, - ) - vbv_fullness_bits = proto.Field( - proto.INT32, - number=13, - ) - b_pyramid = proto.Field( - proto.BOOL, - number=14, - ) - b_frame_count = proto.Field( - proto.INT32, - number=15, - ) - aq_strength = proto.Field( - proto.DOUBLE, - number=16, - ) - profile = proto.Field( - proto.STRING, - number=17, - ) - tune = proto.Field( - proto.STRING, - number=18, - ) - preset = proto.Field( - proto.STRING, - number=19, - ) - - class Vp9CodecSettings(proto.Message): - r"""VP9 codec settings. - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - frame_rate (float): - Required. 
The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. Must be between 1 and 1,000,000,000. - pixel_format (str): - Pixel format to use. The default is ``"yuv420p"``. - - Supported pixel formats: - - - 'yuv420p' pixel format. - - 'yuv422p' pixel format. - - 'yuv444p' pixel format. - - 'yuv420p10' 10-bit HDR pixel format. - - 'yuv422p10' 10-bit HDR pixel format. - - 'yuv444p10' 10-bit HDR pixel format. - - 'yuv420p12' 12-bit HDR pixel format. - - 'yuv422p12' 12-bit HDR pixel format. - - 'yuv444p12' 12-bit HDR pixel format. - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``"vbr"``. - - Supported rate control modes: - - - 'vbr' - variable bitrate - - 'crf' - constant rate factor - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``"3s"``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - ``profile0`` (default) - - ``profile1`` - - ``profile2`` - - ``profile3`` - - The available options are - `WebM-compatible `__\ {: - class="external" }. 
Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``Vp9CodecSettings`` message. - """ - - width_pixels = proto.Field( - proto.INT32, - number=1, - ) - height_pixels = proto.Field( - proto.INT32, - number=2, - ) - frame_rate = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=4, - ) - pixel_format = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode = proto.Field( - proto.STRING, - number=6, - ) - crf_level = proto.Field( - proto.INT32, - number=7, - ) - gop_frame_count = proto.Field( - proto.INT32, - number=8, - oneof='gop_mode', - ) - gop_duration = proto.Field( - proto.MESSAGE, - number=9, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - profile = proto.Field( - proto.STRING, - number=10, - ) - - h264 = proto.Field( - proto.MESSAGE, - number=1, - oneof='codec_settings', - message=H264CodecSettings, - ) - h265 = proto.Field( - proto.MESSAGE, - number=2, - oneof='codec_settings', - message=H265CodecSettings, - ) - vp9 = proto.Field( - proto.MESSAGE, - number=3, - oneof='codec_settings', - message=Vp9CodecSettings, - ) - - -class AudioStream(proto.Message): - r"""Audio stream resource. - Attributes: - codec (str): - The codec for this audio stream. The default is ``"aac"``. - - Supported audio codecs: - - - 'aac' - - 'aac-he' - - 'aac-he-v2' - - 'mp3' - - 'ac3' - - 'eac3' - bitrate_bps (int): - Required. Audio bitrate in bits per second. - Must be between 1 and 10,000,000. - channel_count (int): - Number of audio channels. Must be between 1 - and 6. The default is 2. - channel_layout (Sequence[str]): - A list of channel names specifying layout of the audio - channels. This only affects the metadata embedded in the - container headers, if supported by the specified format. The - default is ``["fl", "fr"]``. 
- - Supported channel names: - - - 'fl' - Front left channel - - 'fr' - Front right channel - - 'sl' - Side left channel - - 'sr' - Side right channel - - 'fc' - Front center channel - - 'lfe' - Low frequency - mapping (Sequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): - The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - sample_rate_hertz (int): - The audio sample rate in Hertz. The default - is 48000 Hertz. - """ - - class AudioMapping(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - - Attributes: - atom_key (str): - Required. The ``EditAtom.key`` that references the atom with - audio inputs in the ``Job.edit_list``. - input_key (str): - Required. The ``Input.key`` that identifies the input file. - input_track (int): - Required. The zero-based index of the track - in the input file. - input_channel (int): - Required. The zero-based index of the channel - in the input audio stream. - output_channel (int): - Required. The zero-based index of the channel - in the output audio stream. - gain_db (float): - Audio volume control in dB. Negative values - decrease volume, positive values increase. The - default is 0. 
- """ - - atom_key = proto.Field( - proto.STRING, - number=1, - ) - input_key = proto.Field( - proto.STRING, - number=2, - ) - input_track = proto.Field( - proto.INT32, - number=3, - ) - input_channel = proto.Field( - proto.INT32, - number=4, - ) - output_channel = proto.Field( - proto.INT32, - number=5, - ) - gain_db = proto.Field( - proto.DOUBLE, - number=6, - ) - - codec = proto.Field( - proto.STRING, - number=1, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=2, - ) - channel_count = proto.Field( - proto.INT32, - number=3, - ) - channel_layout = proto.RepeatedField( - proto.STRING, - number=4, - ) - mapping = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=AudioMapping, - ) - sample_rate_hertz = proto.Field( - proto.INT32, - number=6, - ) - - -class TextStream(proto.Message): - r"""Encoding of a text stream. For example, closed captions or - subtitles. - - Attributes: - codec (str): - The codec for this text stream. The default is ``"webvtt"``. - - Supported text codecs: - - - 'srt' - - 'ttml' - - 'cea608' - - 'cea708' - - 'webvtt' - mapping (Sequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): - The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - """ - - class TextMapping(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - - Attributes: - atom_key (str): - Required. The ``EditAtom.key`` that references atom with - text inputs in the ``Job.edit_list``. - input_key (str): - Required. The ``Input.key`` that identifies the input file. - input_track (int): - Required. The zero-based index of the track - in the input file. 
- """ - - atom_key = proto.Field( - proto.STRING, - number=1, - ) - input_key = proto.Field( - proto.STRING, - number=2, - ) - input_track = proto.Field( - proto.INT32, - number=3, - ) - - codec = proto.Field( - proto.STRING, - number=1, - ) - mapping = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=TextMapping, - ) - - -class SegmentSettings(proto.Message): - r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - Attributes: - segment_duration (google.protobuf.duration_pb2.Duration): - Duration of the segments in seconds. The default is - ``"6.0s"``. Note that ``segmentDuration`` must be greater - than or equal to ```gopDuration`` <#videostream>`__, and - ``segmentDuration`` must be divisible by - ```gopDuration`` <#videostream>`__. - individual_segments (bool): - Required. Create an individual segment file. The default is - ``false``. - """ - - segment_duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - individual_segments = proto.Field( - proto.BOOL, - number=3, - ) - - -class Encryption(proto.Message): - r"""Encryption settings. - Attributes: - key (str): - Required. 128 bit encryption key represented - as lowercase hexadecimal digits. - iv (str): - Required. 128 bit Initialization Vector (IV) - represented as lowercase hexadecimal digits. - aes_128 (google.cloud.video.transcoder_v1.types.Encryption.Aes128Encryption): - Configuration for AES-128 encryption. - sample_aes (google.cloud.video.transcoder_v1.types.Encryption.SampleAesEncryption): - Configuration for SAMPLE-AES encryption. - mpeg_cenc (google.cloud.video.transcoder_v1.types.Encryption.MpegCommonEncryption): - Configuration for MPEG Common Encryption - (MPEG-CENC). - """ - - class Aes128Encryption(proto.Message): - r"""Configuration for AES-128 encryption. - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. 
- """ - - key_uri = proto.Field( - proto.STRING, - number=1, - ) - - class SampleAesEncryption(proto.Message): - r"""Configuration for SAMPLE-AES encryption. - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. - """ - - key_uri = proto.Field( - proto.STRING, - number=1, - ) - - class MpegCommonEncryption(proto.Message): - r"""Configuration for MPEG Common Encryption (MPEG-CENC). - Attributes: - key_id (str): - Required. 128 bit Key ID represented as - lowercase hexadecimal digits for use with common - encryption. - scheme (str): - Required. Specify the encryption scheme. - Supported encryption schemes: - - 'cenc' - - 'cbcs' - """ - - key_id = proto.Field( - proto.STRING, - number=1, - ) - scheme = proto.Field( - proto.STRING, - number=2, - ) - - key = proto.Field( - proto.STRING, - number=1, - ) - iv = proto.Field( - proto.STRING, - number=2, - ) - aes_128 = proto.Field( - proto.MESSAGE, - number=3, - oneof='encryption_mode', - message=Aes128Encryption, - ) - sample_aes = proto.Field( - proto.MESSAGE, - number=4, - oneof='encryption_mode', - message=SampleAesEncryption, - ) - mpeg_cenc = proto.Field( - proto.MESSAGE, - number=5, - oneof='encryption_mode', - message=MpegCommonEncryption, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py deleted file mode 100644 index 3f885d7..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py +++ /dev/null @@ -1,298 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.video.transcoder_v1.types import resources - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1', - manifest={ - 'CreateJobRequest', - 'ListJobsRequest', - 'GetJobRequest', - 'DeleteJobRequest', - 'ListJobsResponse', - 'CreateJobTemplateRequest', - 'ListJobTemplatesRequest', - 'GetJobTemplateRequest', - 'DeleteJobTemplateRequest', - 'ListJobTemplatesResponse', - }, -) - - -class CreateJobRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJob``. - Attributes: - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - job (google.cloud.video.transcoder_v1.types.Job): - Required. Parameters for creating transcoding - job. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - job = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Job, - ) - - -class ListJobsRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobs``. The parent - location from which to retrieve the collection of jobs. - - Attributes: - parent (str): - Required. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - filter (str): - The filter expression, following the syntax - outlined in https://google.aip.dev/160. - order_by (str): - One or more fields to compare and use to sort - the output. 
See - https://google.aip.dev/132#ordering. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=4, - ) - order_by = proto.Field( - proto.STRING, - number=5, - ) - - -class GetJobRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJob``. - Attributes: - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJob``. - Attributes: - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListJobsResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobs``. - Attributes: - jobs (Sequence[google.cloud.video.transcoder_v1.types.Job]): - List of jobs in the specified region. - next_page_token (str): - The pagination token. - unreachable (Sequence[str]): - List of regions that could not be reached. - """ - - @property - def raw_page(self): - return self - - jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Job, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - unreachable = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CreateJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJobTemplate``. - Attributes: - parent (str): - Required. The parent location to create this job template. - Format: ``projects/{project}/locations/{location}`` - job_template (google.cloud.video.transcoder_v1.types.JobTemplate): - Required. Parameters for creating job - template. - job_template_id (str): - Required. 
The ID to use for the job template, which will - become the final component of the job template's resource - name. - - This value should be 4-63 characters, and valid characters - must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - job_template = proto.Field( - proto.MESSAGE, - number=2, - message=resources.JobTemplate, - ) - job_template_id = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobTemplatesRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobTemplates``. - Attributes: - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - filter (str): - The filter expression, following the syntax - outlined in https://google.aip.dev/160. - order_by (str): - One or more fields to compare and use to sort - the output. See - https://google.aip.dev/132#ordering. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - filter = proto.Field( - proto.STRING, - number=4, - ) - order_by = proto.Field( - proto.STRING, - number=5, - ) - - -class GetJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJobTemplate``. - Attributes: - name (str): - Required. The name of the job template to retrieve. Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJobTemplate``. - Attributes: - name (str): - Required. The name of the job template to delete. 
- ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListJobTemplatesResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobTemplates``. - Attributes: - job_templates (Sequence[google.cloud.video.transcoder_v1.types.JobTemplate]): - List of job templates in the specified - region. - next_page_token (str): - The pagination token. - unreachable (Sequence[str]): - List of regions that could not be reached. - """ - - @property - def raw_page(self): - return self - - job_templates = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.JobTemplate, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - unreachable = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 4505b48..0000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index 5c33374..0000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/video/transcoder_v1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py deleted file mode 100644 index 912892a..0000000 --- a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class transcoderCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_job': ('parent', 'job', ), - 'create_job_template': ('parent', 'job_template', 'job_template_id', ), - 'delete_job': ('name', ), - 'delete_job_template': ('name', ), - 'get_job': ('name', ), - 'get_job_template': ('name', ), - 'list_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_job_templates': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. 
- return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=transcoderCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. 
- updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the transcoder client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index 
8f17241..0000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-video-transcoder', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud', 'google.cloud.video'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 
b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py deleted file mode 100644 index 1e521e0..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ /dev/null @@ -1,3237 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceAsyncClient -from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceClient -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.services.transcoder_service import transports -from google.cloud.video.transcoder_v1.services.transcoder_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - 
Delete these pytest markers (Make the "greater than or equal to" tests the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None - assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - TranscoderServiceClient, - TranscoderServiceAsyncClient, -]) -def test_transcoder_service_client_from_service_account_info(client_class): - creds = 
ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'transcoder.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.TranscoderServiceGrpcTransport, "grpc"), - (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - TranscoderServiceClient, - TranscoderServiceAsyncClient, -]) -def test_transcoder_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 
'transcoder.googleapis.com:443' - - -def test_transcoder_service_client_get_transport_class(): - transport = TranscoderServiceClient.get_transport_class() - available_transports = [ - transports.TranscoderServiceGrpcTransport, - ] - assert transport in available_transports - - transport = TranscoderServiceClient.get_transport_class("grpc") - assert transport == transports.TranscoderServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert 
are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_transcoder_service_client_client_options_from_dict(): - with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = TranscoderServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_create_job(transport: str = 'grpc', request_type=services.CreateJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -def test_create_job_from_dict(): - test_create_job(request_type=dict) - - -def test_create_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - -@pytest.mark.asyncio -async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - )) - response = await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - - -def test_create_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = resources.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job == resources.Job(name='name_value') - - -def test_create_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job == resources.Job(name='name_value') - - -@pytest.mark.asyncio -async def test_create_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - -def test_list_jobs(transport: str = 'grpc', request_type=services.ListJobsRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_jobs_from_dict(): - test_list_jobs(request_type=dict) - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = services.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_jobs_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_jobs_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.Job) - for i in results) - -def test_list_jobs_pages(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Job) - for i in responses) - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_jobs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_job(transport: str = 'grpc', request_type=services.GetJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -def test_get_job_from_dict(): - test_get_job(request_type=dict) - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - )) - response = await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - - -def test_get_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = resources.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - services.GetJobRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job( - services.GetJobRequest(), - name='name_value', - ) - - -def test_delete_job(transport: str = 'grpc', request_type=services.DeleteJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -def test_delete_job_from_dict(): - test_delete_job(request_type=dict) - - -def test_delete_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - -@pytest.mark.asyncio -async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. 
- assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - - -def test_delete_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = None - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - - -def test_create_job_template(transport: str = 'grpc', request_type=services.CreateJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_create_job_template_from_dict(): - test_create_job_template(request_type=dict) - - -def test_create_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - client.create_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_job_template_async_from_dict(): - await test_create_job_template_async(request_type=dict) - - -def test_create_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = services.CreateJobTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_template == resources.JobTemplate(name='name_value') - assert args[0].job_template_id == 'job_template_id_value' - - -def test_create_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_template == resources.JobTemplate(name='name_value') - assert args[0].job_template_id == 'job_template_id_value' - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - -def test_list_job_templates(transport: str = 'grpc', request_type=services.ListJobTemplatesRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_job_templates_from_dict(): - test_list_job_templates(request_type=dict) - - -def test_list_job_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - client.list_job_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - -@pytest.mark.asyncio -async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_job_templates_async_from_dict(): - await test_list_job_templates_async(request_type=dict) - - -def test_list_job_templates_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = services.ListJobTemplatesResponse() - client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_templates_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. 
Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_job_templates_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_job_templates_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_job_templates_pager(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_templates(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in results) - -def test_list_job_templates_pages(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_templates_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in responses) - -@pytest.mark.asyncio -async def test_list_job_templates_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_templates(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_job_template(transport: str = 'grpc', request_type=services.GetJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_get_job_template_from_dict(): - test_get_job_template(request_type=dict) - - -def test_get_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - client.get_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_template_async_from_dict(): - await test_get_job_template_async(request_type=dict) - - -def test_get_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - - -def test_delete_job_template(transport: str = 'grpc', request_type=services.DeleteJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_template_from_dict(): - test_delete_job_template(request_type=dict) - - -def test_delete_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - client.delete_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_template_async_from_dict(): - await test_delete_job_template_async(request_type=dict) - - -def test_delete_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = services.DeleteJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = None - client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TranscoderServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.TranscoderServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.TranscoderServiceGrpcTransport, - ) - -def test_transcoder_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_transcoder_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_job', - 'list_jobs', - 'get_job', - 'delete_job', - 'create_job_template', - 'list_job_templates', - 'get_job_template', - 'delete_job_template', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def test_transcoder_service_base_transport_with_adc(): - # 
Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TranscoderServiceGrpcTransport, grpc_helpers), - (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_transcoder_service_host_no_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'), - ) - assert client.transport._host == 'transcoder.googleapis.com:443' - - -def test_transcoder_service_host_with_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'), - ) - assert client.transport._host == 'transcoder.googleapis.com:8000' - -def test_transcoder_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TranscoderServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_transcoder_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.TranscoderServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed 
from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = TranscoderServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = TranscoderServiceClient.job_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_job_path(path) - assert expected == actual - -def test_job_template_path(): - project = "cuttlefish" - location = "mussel" - job_template = "winkle" - expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - actual = TranscoderServiceClient.job_template_path(project, location, job_template) - assert expected == actual - - -def test_parse_job_template_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "job_template": "abalone", - } - path = TranscoderServiceClient.job_template_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_job_template_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = TranscoderServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = TranscoderServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = TranscoderServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = TranscoderServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = TranscoderServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = TranscoderServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = TranscoderServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = TranscoderServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = TranscoderServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = TranscoderServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = TranscoderServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/owl-bot-staging/v1beta1/.coveragerc b/owl-bot-staging/v1beta1/.coveragerc deleted file mode 100644 index 6926d19..0000000 --- a/owl-bot-staging/v1beta1/.coveragerc +++ /dev/null @@ -1,17 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/video/transcoder/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ - # Ignore pkg_resources exceptions. - # This is added at the module level as a safeguard for if someone - # generates the code and tries to run it without pip installing. This - # makes it virtually impossible to test properly. 
- except pkg_resources.DistributionNotFound diff --git a/owl-bot-staging/v1beta1/MANIFEST.in b/owl-bot-staging/v1beta1/MANIFEST.in deleted file mode 100644 index 64bd549..0000000 --- a/owl-bot-staging/v1beta1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/video/transcoder *.py -recursive-include google/cloud/video/transcoder_v1beta1 *.py diff --git a/owl-bot-staging/v1beta1/README.rst b/owl-bot-staging/v1beta1/README.rst deleted file mode 100644 index 43621a1..0000000 --- a/owl-bot-staging/v1beta1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Video Transcoder API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Video Transcoder API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. 
code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1beta1/docs/conf.py b/owl-bot-staging/v1beta1/docs/conf.py deleted file mode 100644 index d739f6d..0000000 --- a/owl-bot-staging/v1beta1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-video-transcoder documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "1.6.3" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. 
-extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = u"google-cloud-video-transcoder" -copyright = u"2020, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. 
-# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Video Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. 
If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. 
Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. -# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-video-transcoder-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. 
List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - master_doc, - "google-cloud-video-transcoder.tex", - u"google-cloud-video-transcoder Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - master_doc, - "google-cloud-video-transcoder", - u"Google Cloud Video Transcoder Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "google-cloud-video-transcoder", - u"google-cloud-video-transcoder Documentation", - author, - "google-cloud-video-transcoder", - "GAPIC library for Google Cloud Video Transcoder API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. 
-# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1beta1/docs/index.rst b/owl-bot-staging/v1beta1/docs/index.rst deleted file mode 100644 index b5ddfd3..0000000 --- a/owl-bot-staging/v1beta1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. 
toctree:: - :maxdepth: 2 - - transcoder_v1beta1/services - transcoder_v1beta1/types diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst deleted file mode 100644 index a3b6569..0000000 --- a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Video Transcoder v1beta1 API -====================================================== -.. toctree:: - :maxdepth: 2 - - transcoder_service diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst deleted file mode 100644 index c631a53..0000000 --- a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/transcoder_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TranscoderService ------------------------------------ - -.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service - :members: - :inherited-members: - -.. automodule:: google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst b/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst deleted file mode 100644 index cb38b8a..0000000 --- a/owl-bot-staging/v1beta1/docs/transcoder_v1beta1/types.rst +++ /dev/null @@ -1,7 +0,0 @@ -Types for Google Cloud Video Transcoder v1beta1 API -=================================================== - -.. 
automodule:: google.cloud.video.transcoder_v1beta1.types - :members: - :undoc-members: - :show-inheritance: diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py deleted file mode 100644 index 6ceb79f..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/__init__.py +++ /dev/null @@ -1,85 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# - -from google.cloud.video.transcoder_v1beta1.services.transcoder_service.client import TranscoderServiceClient -from google.cloud.video.transcoder_v1beta1.services.transcoder_service.async_client import TranscoderServiceAsyncClient - -from google.cloud.video.transcoder_v1beta1.types.resources import AdBreak -from google.cloud.video.transcoder_v1beta1.types.resources import AudioStream -from google.cloud.video.transcoder_v1beta1.types.resources import EditAtom -from google.cloud.video.transcoder_v1beta1.types.resources import ElementaryStream -from google.cloud.video.transcoder_v1beta1.types.resources import Encryption -from google.cloud.video.transcoder_v1beta1.types.resources import FailureDetail -from google.cloud.video.transcoder_v1beta1.types.resources import Input -from google.cloud.video.transcoder_v1beta1.types.resources import Job -from google.cloud.video.transcoder_v1beta1.types.resources import JobConfig -from google.cloud.video.transcoder_v1beta1.types.resources import JobTemplate -from google.cloud.video.transcoder_v1beta1.types.resources import Manifest -from google.cloud.video.transcoder_v1beta1.types.resources import MuxStream -from google.cloud.video.transcoder_v1beta1.types.resources import Output -from google.cloud.video.transcoder_v1beta1.types.resources import Overlay -from google.cloud.video.transcoder_v1beta1.types.resources import PreprocessingConfig -from google.cloud.video.transcoder_v1beta1.types.resources import Progress -from google.cloud.video.transcoder_v1beta1.types.resources import PubsubDestination -from google.cloud.video.transcoder_v1beta1.types.resources import SegmentSettings -from google.cloud.video.transcoder_v1beta1.types.resources import SpriteSheet -from google.cloud.video.transcoder_v1beta1.types.resources import TextStream -from google.cloud.video.transcoder_v1beta1.types.resources import VideoStream -from google.cloud.video.transcoder_v1beta1.types.services import CreateJobRequest -from 
google.cloud.video.transcoder_v1beta1.types.services import CreateJobTemplateRequest -from google.cloud.video.transcoder_v1beta1.types.services import DeleteJobRequest -from google.cloud.video.transcoder_v1beta1.types.services import DeleteJobTemplateRequest -from google.cloud.video.transcoder_v1beta1.types.services import GetJobRequest -from google.cloud.video.transcoder_v1beta1.types.services import GetJobTemplateRequest -from google.cloud.video.transcoder_v1beta1.types.services import ListJobsRequest -from google.cloud.video.transcoder_v1beta1.types.services import ListJobsResponse -from google.cloud.video.transcoder_v1beta1.types.services import ListJobTemplatesRequest -from google.cloud.video.transcoder_v1beta1.types.services import ListJobTemplatesResponse - -__all__ = ('TranscoderServiceClient', - 'TranscoderServiceAsyncClient', - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Encryption', - 'FailureDetail', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'Progress', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. 
diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py deleted file mode 100644 index 85efcb9..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/__init__.py +++ /dev/null @@ -1,86 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -from .services.transcoder_service import TranscoderServiceClient -from .services.transcoder_service import TranscoderServiceAsyncClient - -from .types.resources import AdBreak -from .types.resources import AudioStream -from .types.resources import EditAtom -from .types.resources import ElementaryStream -from .types.resources import Encryption -from .types.resources import FailureDetail -from .types.resources import Input -from .types.resources import Job -from .types.resources import JobConfig -from .types.resources import JobTemplate -from .types.resources import Manifest -from .types.resources import MuxStream -from .types.resources import Output -from .types.resources import Overlay -from .types.resources import PreprocessingConfig -from .types.resources import Progress -from .types.resources import PubsubDestination -from .types.resources import SegmentSettings -from .types.resources import SpriteSheet -from .types.resources import TextStream -from .types.resources import VideoStream -from .types.services import CreateJobRequest -from 
.types.services import CreateJobTemplateRequest -from .types.services import DeleteJobRequest -from .types.services import DeleteJobTemplateRequest -from .types.services import GetJobRequest -from .types.services import GetJobTemplateRequest -from .types.services import ListJobsRequest -from .types.services import ListJobsResponse -from .types.services import ListJobTemplatesRequest -from .types.services import ListJobTemplatesResponse - -__all__ = ( - 'TranscoderServiceAsyncClient', -'AdBreak', -'AudioStream', -'CreateJobRequest', -'CreateJobTemplateRequest', -'DeleteJobRequest', -'DeleteJobTemplateRequest', -'EditAtom', -'ElementaryStream', -'Encryption', -'FailureDetail', -'GetJobRequest', -'GetJobTemplateRequest', -'Input', -'Job', -'JobConfig', -'JobTemplate', -'ListJobTemplatesRequest', -'ListJobTemplatesResponse', -'ListJobsRequest', -'ListJobsResponse', -'Manifest', -'MuxStream', -'Output', -'Overlay', -'PreprocessingConfig', -'Progress', -'PubsubDestination', -'SegmentSettings', -'SpriteSheet', -'TextStream', -'TranscoderServiceClient', -'VideoStream', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json deleted file mode 100644 index ebf08b7..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.video.transcoder_v1beta1", - "protoPackage": "google.cloud.video.transcoder.v1beta1", - "schema": "1.0", - "services": { - "TranscoderService": { - "clients": { - "grpc": { - "libraryClient": "TranscoderServiceClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - 
}, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "grpc-async": { - "libraryClient": "TranscoderServiceAsyncClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py deleted file mode 100644 index 4de6597..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py deleted file mode 100644 index 1688786..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .client import TranscoderServiceClient -from .async_client import TranscoderServiceAsyncClient - -__all__ = ( - 'TranscoderServiceClient', - 'TranscoderServiceAsyncClient', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py deleted file mode 100644 index de9d823..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/async_client.py +++ /dev/null @@ -1,809 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -import functools -import re -from typing import Dict, Sequence, Tuple, Type, Union -import pkg_resources - -import google.api_core.client_options as ClientOptions # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport -from .client import TranscoderServiceClient - - -class TranscoderServiceAsyncClient: - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. 
- """ - - _client: TranscoderServiceClient - - DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT - - job_path = staticmethod(TranscoderServiceClient.job_path) - parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) - job_template_path = staticmethod(TranscoderServiceClient.job_template_path) - parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) - common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) - common_project_path = staticmethod(TranscoderServiceClient.common_project_path) - parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) - common_location_path = staticmethod(TranscoderServiceClient.common_location_path) - parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. 
- """ - return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. - """ - return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) - - def __init__(self, *, - credentials: ga_credentials.Credentials = None, - transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", - client_options: ClientOptions = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. 
It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - """ - self._client = TranscoderServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_job(self, - request: services.CreateJobRequest = None, - *, - parent: str = None, - job: resources.Job = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.CreateJobRequest`): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (:class:`str`): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job (:class:`google.cloud.video.transcoder_v1beta1.types.Job`): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def list_jobs(self, - request: services.ListJobsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""Lists jobs in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.ListJobsRequest`): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (:class:`str`): - Required. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsAsyncPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: services.GetJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.GetJobRequest`): - The request object. Request message for - `TranscoderService.GetJob`. - name (:class:`str`): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job(self, - request: services.DeleteJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest`): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (:class:`str`): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_template(self, - request: services.CreateJobTemplateRequest = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest`): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (:class:`str`): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (:class:`google.cloud.video.transcoder_v1beta1.types.JobTemplate`): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (:class:`str`): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. 
- rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_templates(self, - request: services.ListJobTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesAsyncPager: - r"""Lists job templates in the specified region. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest`): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (:class:`str`): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: - Response message for TranscoderService.ListJobTemplates. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_templates, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_template(self, - request: services.GetJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest`): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (:class:`str`): - Required. The name of the job template to retrieve. 
- Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - async def delete_job_template(self, - request: services.DeleteJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (:class:`google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest`): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (:class:`str`): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "TranscoderServiceAsyncClient", -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py deleted file mode 100644 index a12d3ce..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/client.py +++ /dev/null @@ -1,1009 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from collections import OrderedDict -from distutils import util -import os -import re -from typing import Callable, Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources - -from google.api_core import client_options as client_options_lib # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import TranscoderServiceGrpcTransport -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -class TranscoderServiceClientMeta(type): - """Metaclass for the TranscoderService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] - _transport_registry["grpc"] = TranscoderServiceGrpcTransport - _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: str = None, - ) -> Type[TranscoderServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is - provided, then the first transport in the registry is used. - - Returns: - The transport class to use. - """ - # If a specific transport is requested, return that one. - if label: - return cls._transport_registry[label] - - # No transport is requested; return the default (that is, the first one - # in the dictionary). - return next(iter(cls._transport_registry.values())) - - -class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta): - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - """ - - @staticmethod - def _get_default_mtls_endpoint(api_endpoint): - """Converts api endpoint to mTLS endpoint. - - Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to - "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. - Args: - api_endpoint (Optional[str]): the api endpoint to convert. - Returns: - str: converted mTLS api endpoint. - """ - if not api_endpoint: - return api_endpoint - - mtls_endpoint_re = re.compile( - r"(?P[^.]+)(?P\.mtls)?(?P\.sandbox)?(?P\.googleapis\.com)?" 
- ) - - m = mtls_endpoint_re.match(api_endpoint) - name, mtls, sandbox, googledomain = m.groups() - if mtls or not googledomain: - return api_endpoint - - if sandbox: - return api_endpoint.replace( - "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" - ) - - return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") - - DEFAULT_ENDPOINT = "transcoder.googleapis.com" - DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore - DEFAULT_ENDPOINT - ) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_info(info) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceClient: The constructed client. - """ - credentials = service_account.Credentials.from_service_account_file( - filename) - kwargs["credentials"] = credentials - return cls(*args, **kwargs) - - from_service_account_json = from_service_account_file - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client - instance. 
- """ - return self._transport - - @staticmethod - def job_path(project: str,location: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_template_path(project: str,location: str,job_template: str,) -> str: - """Returns a fully-qualified job_template string.""" - return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - - @staticmethod - def parse_job_template_path(path: str) -> Dict[str,str]: - """Parses a job_template path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - 
"""Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, TranscoderServiceTransport, None] = None, - client_options: Optional[client_options_lib.ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. 
- client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - - # Create SSL credentials for mutual TLS if needed. 
- use_client_cert = bool(util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))) - - client_cert_source_func = None - is_mtls = False - if use_client_cert: - if client_options.client_cert_source: - is_mtls = True - client_cert_source_func = client_options.client_cert_source - else: - is_mtls = mtls.has_default_client_cert_source() - if is_mtls: - client_cert_source_func = mtls.default_client_cert_source() - else: - client_cert_source_func = None - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - else: - use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_mtls_env == "never": - api_endpoint = self.DEFAULT_ENDPOINT - elif use_mtls_env == "always": - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - elif use_mtls_env == "auto": - if is_mtls: - api_endpoint = self.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = self.DEFAULT_ENDPOINT - else: - raise MutualTLSChannelError( - "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted " - "values: never, auto, always" - ) - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, TranscoderServiceTransport): - # transport is a TranscoderServiceTransport instance. - if credentials or client_options.credentials_file: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." 
- ) - self._transport = transport - else: - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=( - Transport == type(self).get_transport_class("grpc") - or Transport == type(self).get_transport_class("grpc_asyncio") - ), - ) - - def create_job(self, - request: services.CreateJobRequest = None, - *, - parent: str = None, - job: resources.Job = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.CreateJobRequest): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.video.transcoder_v1beta1.types.Job): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. 
- # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobRequest): - request = services.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_jobs(self, - request: services.ListJobsRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""Lists jobs in the specified region. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (str): - Required. 
Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobsPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobsRequest): - request = services.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: services.GetJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.GetJobRequest): - The request object. Request message for - `TranscoderService.GetJob`. - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, services.GetJobRequest): - request = services.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: services.DeleteJobRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.DeleteJobRequest): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobRequest): - request = services.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_template(self, - request: services.CreateJobTemplateRequest = None, - *, - parent: str = None, - job_template: resources.JobTemplate = None, - job_template_id: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.CreateJobTemplateRequest): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (str): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. 
- job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobTemplateRequest): - request = services.CreateJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_templates(self, - request: services.ListJobTemplatesRequest = None, - *, - parent: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesPager: - r"""Lists job templates in the specified region. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.services.transcoder_service.pagers.ListJobTemplatesPager: - Response message for TranscoderService.ListJobTemplates. 
- - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobTemplatesRequest): - request = services.ListJobTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def get_job_template(self, - request: services.GetJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.GetJobTemplateRequest): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (str): - Required. The name of the job template to retrieve. - Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1beta1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.GetJobTemplateRequest): - request = services.GetJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. 
- if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job_template(self, - request: services.DeleteJobTemplateRequest = None, - *, - name: str = None, - retry: retries.Retry = gapic_v1.method.DEFAULT, - timeout: float = None, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - Args: - request (google.cloud.video.transcoder_v1beta1.types.DeleteJobTemplateRequest): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Sanity check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobTemplateRequest): - request = services.DeleteJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - - - - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - "google-cloud-video-transcoder", - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - - -__all__ = ( - "TranscoderServiceClient", -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py deleted file mode 100644 index 63d2ed7..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/pagers.py +++ /dev/null @@ -1,263 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. 
- - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobsResponse], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[resources.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. 
- - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobsResponse]], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[resources.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobTemplatesResponse], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. 
- request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterable[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterable[resources.JobTemplate]: - for page in self.pages: - yield from page.job_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesAsyncPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. 
- """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1beta1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterable[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - - def __aiter__(self) -> AsyncIterable[resources.JobTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.job_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py deleted file mode 100644 index 5ed2b9a..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/__init__.py +++ /dev/null @@ -1,33 
+0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import TranscoderServiceTransport -from .grpc import TranscoderServiceGrpcTransport -from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. -_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] -_transport_registry['grpc'] = TranscoderServiceGrpcTransport -_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport - -__all__ = ( - 'TranscoderServiceTransport', - 'TranscoderServiceGrpcTransport', - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py deleted file mode 100644 index ccf0914..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py +++ /dev/null @@ -1,268 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import packaging.version -import pkg_resources - -import google.auth # type: ignore -import google.api_core # type: ignore -from google.api_core import exceptions as core_exceptions # type: ignore -from google.api_core import gapic_v1 # type: ignore -from google.api_core import retry as retries # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore - -try: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( - gapic_version=pkg_resources.get_distribution( - 'google-cloud-video-transcoder', - ).version, - ) -except pkg_resources.DistributionNotFound: - DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() - -try: - # google.auth.__version__ was added in 1.26.0 - _GOOGLE_AUTH_VERSION = google.auth.__version__ -except AttributeError: - try: # try pkg_resources if it is available - _GOOGLE_AUTH_VERSION = pkg_resources.get_distribution("google-auth").version - except pkg_resources.DistributionNotFound: # pragma: NO COVER - _GOOGLE_AUTH_VERSION = None - - -class TranscoderServiceTransport(abc.ABC): - """Abstract transport class for TranscoderService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'transcoder.googleapis.com' - def __init__( - 
self, *, - host: str = DEFAULT_HOST, - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - scopes_kwargs = self._get_scopes_kwargs(self._host, scopes) - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. 
- if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - - # If the credentials is service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # TODO(busunkim): This method is in the base transport - # to avoid duplicating code across the transport classes. These functions - # should be deleted once the minimum required versions of google-auth is increased. - - # TODO: Remove this function once google-auth >= 1.25.0 is required - @classmethod - def _get_scopes_kwargs(cls, host: str, scopes: Optional[Sequence[str]]) -> Dict[str, Optional[Sequence[str]]]: - """Returns scopes kwargs to pass to google-auth methods depending on the google-auth version""" - - scopes_kwargs = {} - - if _GOOGLE_AUTH_VERSION and ( - packaging.version.parse(_GOOGLE_AUTH_VERSION) - >= packaging.version.parse("1.25.0") - ): - scopes_kwargs = {"scopes": scopes, "default_scopes": cls.AUTH_SCOPES} - else: - scopes_kwargs = {"scopes": scopes or cls.AUTH_SCOPES} - - return scopes_kwargs - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. 
- self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_job_template: gapic_v1.method.wrap_method( - self.create_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.list_job_templates: gapic_v1.method.wrap_method( - self.list_job_templates, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job_template: gapic_v1.method.wrap_method( - self.get_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job_template: gapic_v1.method.wrap_method( - self.delete_job_template, - default_timeout=60.0, - client_info=client_info, - ), - } - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Union[ - services.ListJobsResponse, - Awaitable[services.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def 
list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Union[ - services.ListJobTemplatesResponse, - Awaitable[services.ListJobTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - -__all__ = ( - 'TranscoderServiceTransport', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py deleted file mode 100644 index 2621bc3..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py +++ /dev/null @@ -1,442 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers # type: ignore -from google.api_core import gapic_v1 # type: ignore -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO - - -class TranscoderServiceGrpcTransport(TranscoderServiceTransport): - """gRPC backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. 
- """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Sequence[str] = None, - channel: grpc.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. 
- ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: str = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. 
- credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - resources.Job]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - services.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - resources.Job]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - services.ListJobTemplatesResponse]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - ~.ListJobTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - -__all__ = ( - 'TranscoderServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py deleted file mode 100644 index c91e961..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,446 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 # type: ignore -from google.api_core import grpc_helpers_async # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -import packaging.version - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import TranscoderServiceGrpcTransport - - -class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): - """gRPC AsyncIO backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. 
- - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. 
- """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: ga_credentials.Credentials = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: aio.Channel = None, - api_mtls_endpoint: str = None, - client_cert_source: Callable[[], Tuple[bytes, bytes]] = None, - ssl_channel_credentials: grpc.ChannelCredentials = None, - client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None, - quota_project_id=None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. 
- If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or applicatin default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. 
- credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - credentials=self._credentials, - credentials_file=credentials_file, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the create job method over gRPC. 
- - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Awaitable[services.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. 
- """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Awaitable[services.ListJobTemplatesResponse]]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - Awaitable[~.ListJobTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1beta1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - -__all__ = ( - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py deleted file mode 100644 index ebb04cc..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/__init__.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from .resources import ( - AdBreak, - AudioStream, - EditAtom, - ElementaryStream, - Encryption, - FailureDetail, - Input, - Job, - JobConfig, - JobTemplate, - Manifest, - MuxStream, - Output, - Overlay, - PreprocessingConfig, - Progress, - PubsubDestination, - SegmentSettings, - SpriteSheet, - TextStream, - VideoStream, -) -from .services import ( - CreateJobRequest, - CreateJobTemplateRequest, - DeleteJobRequest, - DeleteJobTemplateRequest, - GetJobRequest, - GetJobTemplateRequest, - ListJobsRequest, - ListJobsResponse, - ListJobTemplatesRequest, - ListJobTemplatesResponse, -) - -__all__ = ( - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Encryption', - 'FailureDetail', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'Progress', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py deleted file mode 100644 index e04fca3..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/resources.py +++ /dev/null @@ -1,1736 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 
2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1beta1', - manifest={ - 'Job', - 'JobTemplate', - 'JobConfig', - 'Input', - 'Output', - 'EditAtom', - 'AdBreak', - 'ElementaryStream', - 'MuxStream', - 'Manifest', - 'PubsubDestination', - 'SpriteSheet', - 'Overlay', - 'PreprocessingConfig', - 'VideoStream', - 'AudioStream', - 'TextStream', - 'SegmentSettings', - 'Encryption', - 'Progress', - 'FailureDetail', - }, -) - - -class Job(proto.Message): - r"""Transcoding job resource. - Attributes: - name (str): - The resource name of the job. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - input_uri (str): - Input only. Specify the ``input_uri`` to populate empty - ``uri`` fields in each element of ``Job.config.inputs`` or - ``JobTemplate.config.inputs`` when using template. URI of - the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). - output_uri (str): - Input only. Specify the ``output_uri`` to populate an empty - ``Job.config.output.uri`` or - ``JobTemplate.config.output.uri`` when using template. URI - for the output file(s). For example, - ``gs://my-bucket/outputs/``. - template_id (str): - Input only. 
Specify the ``template_id`` to use for - populating ``Job.config``. The default is ``preset/web-hd``. - - Preset Transcoder templates: - - - ``preset/{preset_id}`` - - - User defined JobTemplate: ``{job_template_id}`` - config (google.cloud.video.transcoder_v1beta1.types.JobConfig): - The configuration for this job. - priority (int): - Specify the priority of the job. Enter a - value between 0 and 100, where 0 is the lowest - priority and 100 is the highest priority. The - default is 0. - origin_uri (google.cloud.video.transcoder_v1beta1.types.Job.OriginUri): - Output only. The origin URI. - - state (google.cloud.video.transcoder_v1beta1.types.Job.ProcessingState): - Output only. The current state of the job. - progress (google.cloud.video.transcoder_v1beta1.types.Progress): - Output only. Estimated fractional progress, from ``0`` to - ``1`` for each step. - - .. raw:: html - - - failure_reason (str): - Output only. A description of the reason for the failure. - This property is always present when ``state`` is - ``FAILED``. - failure_details (Sequence[google.cloud.video.transcoder_v1beta1.types.FailureDetail]): - Output only. List of failure details. This property may - contain additional information about the failure when - ``failure_reason`` is present. - - .. raw:: html - - - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - finished. - ttl_after_completion_days (int): - Job time to live value in days, which will be - effective after job completion. Job should be - deleted automatically after the given TTL. Enter - a value between 1 and 90. The default is 30. 
- """ - class ProcessingState(proto.Enum): - r"""The current state of the job.""" - PROCESSING_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - SUCCEEDED = 3 - FAILED = 4 - - class OriginUri(proto.Message): - r"""The origin URI. - Attributes: - hls (str): - HLS manifest URI per - https://tools.ietf.org/html/rfc8216#section-4.3.4. - If multiple HLS manifests are created, only the - first one is listed. - dash (str): - Dash manifest URI. If multiple Dash manifests - are created, only the first one is listed. - """ - - hls = proto.Field( - proto.STRING, - number=1, - ) - dash = proto.Field( - proto.STRING, - number=2, - ) - - name = proto.Field( - proto.STRING, - number=1, - ) - input_uri = proto.Field( - proto.STRING, - number=2, - ) - output_uri = proto.Field( - proto.STRING, - number=3, - ) - template_id = proto.Field( - proto.STRING, - number=4, - oneof='job_config', - ) - config = proto.Field( - proto.MESSAGE, - number=5, - oneof='job_config', - message='JobConfig', - ) - priority = proto.Field( - proto.INT32, - number=6, - ) - origin_uri = proto.Field( - proto.MESSAGE, - number=7, - message=OriginUri, - ) - state = proto.Field( - proto.ENUM, - number=8, - enum=ProcessingState, - ) - progress = proto.Field( - proto.MESSAGE, - number=9, - message='Progress', - ) - failure_reason = proto.Field( - proto.STRING, - number=10, - ) - failure_details = proto.RepeatedField( - proto.MESSAGE, - number=11, - message='FailureDetail', - ) - create_time = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - start_time = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - end_time = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - ttl_after_completion_days = proto.Field( - proto.INT32, - number=15, - ) - - -class JobTemplate(proto.Message): - r"""Transcoding job template resource. - Attributes: - name (str): - The resource name of the job template. 
Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - config (google.cloud.video.transcoder_v1beta1.types.JobConfig): - The configuration for this template. - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - config = proto.Field( - proto.MESSAGE, - number=2, - message='JobConfig', - ) - - -class JobConfig(proto.Message): - r"""Job configuration - Attributes: - inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.Input]): - List of input assets stored in Cloud Storage. - edit_list (Sequence[google.cloud.video.transcoder_v1beta1.types.EditAtom]): - List of ``Edit atom``\ s. Defines the ultimate timeline of - the resulting file or manifest. - elementary_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.ElementaryStream]): - List of elementary streams. - mux_streams (Sequence[google.cloud.video.transcoder_v1beta1.types.MuxStream]): - List of multiplexing settings for output - streams. - manifests (Sequence[google.cloud.video.transcoder_v1beta1.types.Manifest]): - List of output manifests. - output (google.cloud.video.transcoder_v1beta1.types.Output): - Output configuration. - ad_breaks (Sequence[google.cloud.video.transcoder_v1beta1.types.AdBreak]): - List of ad breaks. Specifies where to insert - ad break tags in the output manifests. - pubsub_destination (google.cloud.video.transcoder_v1beta1.types.PubsubDestination): - Destination on Pub/Sub. - sprite_sheets (Sequence[google.cloud.video.transcoder_v1beta1.types.SpriteSheet]): - List of output sprite sheets. - overlays (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay]): - List of overlays on the output video, in - descending Z-order. 
- """ - - inputs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Input', - ) - edit_list = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='EditAtom', - ) - elementary_streams = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ElementaryStream', - ) - mux_streams = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='MuxStream', - ) - manifests = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='Manifest', - ) - output = proto.Field( - proto.MESSAGE, - number=6, - message='Output', - ) - ad_breaks = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='AdBreak', - ) - pubsub_destination = proto.Field( - proto.MESSAGE, - number=8, - message='PubsubDestination', - ) - sprite_sheets = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='SpriteSheet', - ) - overlays = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='Overlay', - ) - - -class Input(proto.Message): - r"""Input asset. - Attributes: - key (str): - A unique key for this input. Must be - specified when using advanced mapping and edit - lists. - uri (str): - URI of the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). If empty, the value will - be populated from ``Job.input_uri``. - preprocessing_config (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig): - Preprocessing configurations. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - uri = proto.Field( - proto.STRING, - number=2, - ) - preprocessing_config = proto.Field( - proto.MESSAGE, - number=3, - message='PreprocessingConfig', - ) - - -class Output(proto.Message): - r"""Location of output file(s) in a Cloud Storage bucket. - Attributes: - uri (str): - URI for the output file(s). For example, - ``gs://my-bucket/outputs/``. If empty the value is populated - from ``Job.output_uri``. 
- """ - - uri = proto.Field( - proto.STRING, - number=1, - ) - - -class EditAtom(proto.Message): - r"""Edit atom. - Attributes: - key (str): - A unique key for this atom. Must be specified - when using advanced mapping. - inputs (Sequence[str]): - List of ``Input.key``\ s identifying files that should be - used in this atom. The listed ``inputs`` must have the same - timeline. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds for the atom, relative to the input file - timeline. When ``end_time_offset`` is not specified, the - ``inputs`` are used until the end of the atom. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the atom, relative to the input - file timeline. The default is ``0s``. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - inputs = proto.RepeatedField( - proto.STRING, - number=2, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - -class AdBreak(proto.Message): - r"""Ad break. - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the ad break, relative to the - output file timeline. The default is ``0s``. - """ - - start_time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - -class ElementaryStream(proto.Message): - r"""Encoding of an input file such as an audio, video, or text - track. Elementary streams must be packaged before - mapping and sharing between different output formats. - - Attributes: - key (str): - A unique key for this elementary stream. - video_stream (google.cloud.video.transcoder_v1beta1.types.VideoStream): - Encoding of a video stream. - audio_stream (google.cloud.video.transcoder_v1beta1.types.AudioStream): - Encoding of an audio stream. 
- text_stream (google.cloud.video.transcoder_v1beta1.types.TextStream): - Encoding of a text stream. For example, - closed captions or subtitles. - """ - - key = proto.Field( - proto.STRING, - number=4, - ) - video_stream = proto.Field( - proto.MESSAGE, - number=1, - oneof='elementary_stream', - message='VideoStream', - ) - audio_stream = proto.Field( - proto.MESSAGE, - number=2, - oneof='elementary_stream', - message='AudioStream', - ) - text_stream = proto.Field( - proto.MESSAGE, - number=3, - oneof='elementary_stream', - message='TextStream', - ) - - -class MuxStream(proto.Message): - r"""Multiplexing settings for output stream. - Attributes: - key (str): - A unique key for this multiplexed stream. HLS media - manifests will be named ``MuxStream.key`` with the - ``".m3u8"`` extension suffix. - file_name (str): - The name of the generated file. The default is - ``MuxStream.key`` with the extension suffix corresponding to - the ``MuxStream.container``. - - Individual segments also have an incremental 10-digit - zero-padded suffix starting from 0 before the extension, - such as ``"mux_stream0000000123.ts"``. - container (str): - The container format. The default is ``"mp4"`` - - Supported container formats: - - - 'ts' - - 'fmp4'- the corresponding file extension is ``".m4s"`` - - 'mp4' - - 'vtt' - elementary_streams (Sequence[str]): - List of ``ElementaryStream.key``\ s multiplexed in this - stream. - segment_settings (google.cloud.video.transcoder_v1beta1.types.SegmentSettings): - Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - encryption (google.cloud.video.transcoder_v1beta1.types.Encryption): - Encryption settings. 
- """ - - key = proto.Field( - proto.STRING, - number=1, - ) - file_name = proto.Field( - proto.STRING, - number=2, - ) - container = proto.Field( - proto.STRING, - number=3, - ) - elementary_streams = proto.RepeatedField( - proto.STRING, - number=4, - ) - segment_settings = proto.Field( - proto.MESSAGE, - number=5, - message='SegmentSettings', - ) - encryption = proto.Field( - proto.MESSAGE, - number=6, - message='Encryption', - ) - - -class Manifest(proto.Message): - r"""Manifest configuration. - Attributes: - file_name (str): - The name of the generated file. The default is - ``"manifest"`` with the extension suffix corresponding to - the ``Manifest.type``. - type_ (google.cloud.video.transcoder_v1beta1.types.Manifest.ManifestType): - Required. Type of the manifest, can be "HLS" - or "DASH". - mux_streams (Sequence[str]): - Required. List of user given ``MuxStream.key``\ s that - should appear in this manifest. - - When ``Manifest.type`` is ``HLS``, a media manifest with - name ``MuxStream.key`` and ``.m3u8`` extension is generated - for each element of the ``Manifest.mux_streams``. - """ - class ManifestType(proto.Enum): - r"""The manifest type can be either ``"HLS"`` or ``"DASH"``.""" - MANIFEST_TYPE_UNSPECIFIED = 0 - HLS = 1 - DASH = 2 - - file_name = proto.Field( - proto.STRING, - number=1, - ) - type_ = proto.Field( - proto.ENUM, - number=2, - enum=ManifestType, - ) - mux_streams = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class PubsubDestination(proto.Message): - r"""A Pub/Sub destination. - Attributes: - topic (str): - The name of the Pub/Sub topic to publish job completion - notification to. For example: - ``projects/{project}/topics/{topic}``. - """ - - topic = proto.Field( - proto.STRING, - number=1, - ) - - -class SpriteSheet(proto.Message): - r"""Sprite sheet configuration. - Attributes: - format_ (str): - Format type. The default is ``"jpeg"``. - - Supported formats: - - - 'jpeg' - file_prefix (str): - Required. 
File name prefix for the generated sprite sheets. - - Each sprite sheet has an incremental 10-digit zero-padded - suffix starting from 0 before the extension, such as - ``"sprite_sheet0000000123.jpeg"``. - sprite_width_pixels (int): - Required. The width of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] - field or the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] - field, but not both (the API will automatically calculate - the missing field). - sprite_height_pixels (int): - Required. The height of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_height_pixels] - field or the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1beta1.SpriteSheet.sprite_width_pixels] - field, but not both (the API will automatically calculate - the missing field). - column_count (int): - The maximum number of sprites per row in a - sprite sheet. The default is 0, which indicates - no maximum limit. - row_count (int): - The maximum number of rows per sprite sheet. - When the sprite sheet is full, a new sprite - sheet is created. The default is 0, which - indicates no maximum limit. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds, relative to the output file timeline. - Determines the first sprite to pick. The default is ``0s``. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds, relative to the output file timeline. - When ``end_time_offset`` is not specified, the sprites are - generated until the end of the output file. - total_count (int): - Total number of sprites. 
Create the specified - number of sprites distributed evenly across the - timeline of the output media. The default is - 100. - interval (google.protobuf.duration_pb2.Duration): - Starting from ``0s``, create sprites at regular intervals. - Specify the interval value in seconds. - quality (int): - The quality of the generated sprite sheet. - Enter a value between 1 and 100, where 1 is the - lowest quality and 100 is the highest quality. - The default is 100. A high quality value - corresponds to a low image data compression - ratio. - """ - - format_ = proto.Field( - proto.STRING, - number=1, - ) - file_prefix = proto.Field( - proto.STRING, - number=2, - ) - sprite_width_pixels = proto.Field( - proto.INT32, - number=3, - ) - sprite_height_pixels = proto.Field( - proto.INT32, - number=4, - ) - column_count = proto.Field( - proto.INT32, - number=5, - ) - row_count = proto.Field( - proto.INT32, - number=6, - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - total_count = proto.Field( - proto.INT32, - number=9, - oneof='extraction_strategy', - ) - interval = proto.Field( - proto.MESSAGE, - number=10, - oneof='extraction_strategy', - message=duration_pb2.Duration, - ) - quality = proto.Field( - proto.INT32, - number=11, - ) - - -class Overlay(proto.Message): - r"""Overlay configuration. - Attributes: - image (google.cloud.video.transcoder_v1beta1.types.Overlay.Image): - Image overlay. - animations (Sequence[google.cloud.video.transcoder_v1beta1.types.Overlay.Animation]): - List of Animations. The list should be - chronological, without any time overlap. - """ - class FadeType(proto.Enum): - r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" - FADE_TYPE_UNSPECIFIED = 0 - FADE_IN = 1 - FADE_OUT = 2 - - class NormalizedCoordinate(proto.Message): - r"""2D normalized coordinates. 
Default: ``{0.0, 0.0}`` - Attributes: - x (float): - Normalized x coordinate. - y (float): - Normalized y coordinate. - """ - - x = proto.Field( - proto.DOUBLE, - number=1, - ) - y = proto.Field( - proto.DOUBLE, - number=2, - ) - - class Image(proto.Message): - r"""Overlaid jpeg image. - Attributes: - uri (str): - Required. URI of the JPEG image in Cloud Storage. For - example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only - supported image type. - resolution (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized image resolution, based on output video - resolution. Valid values: ``0.0``–``1.0``. To respect the - original image aspect ratio, set either ``x`` or ``y`` to - ``0.0``. To use the original image resolution, set both - ``x`` and ``y`` to ``0.0``. - alpha (float): - Target image opacity. Valid values are from ``1.0`` (solid, - default) to ``0.0`` (transparent), exclusive. Set this to a - value greater than ``0.0``. - """ - - uri = proto.Field( - proto.STRING, - number=1, - ) - resolution = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - alpha = proto.Field( - proto.DOUBLE, - number=3, - ) - - class AnimationStatic(proto.Message): - r"""Display static overlay object. - Attributes: - xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start displaying the overlay - object, in seconds. 
Default: 0 - """ - - xy = proto.Field( - proto.MESSAGE, - number=1, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - class AnimationFade(proto.Message): - r"""Display overlay object with fade animation. - Attributes: - fade_type (google.cloud.video.transcoder_v1beta1.types.Overlay.FadeType): - Required. Type of fade animation: ``FADE_IN`` or - ``FADE_OUT``. - xy (google.cloud.video.transcoder_v1beta1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start the fade animation, in - seconds. Default: 0 - end_time_offset (google.protobuf.duration_pb2.Duration): - The time to end the fade animation, in seconds. Default: - ``start_time_offset`` + 1s - """ - - fade_type = proto.Field( - proto.ENUM, - number=1, - enum='Overlay.FadeType', - ) - xy = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - end_time_offset = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - class AnimationEnd(proto.Message): - r"""End previous overlay animation from the video. Without - AnimationEnd, the overlay object will keep the state of previous - animation until the end of the video. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to end overlay object, in seconds. 
- Default: 0 - """ - - start_time_offset = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - class Animation(proto.Message): - r"""Animation types. - Attributes: - animation_static (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationStatic): - Display static overlay object. - animation_fade (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationFade): - Display overlay object with fade animation. - animation_end (google.cloud.video.transcoder_v1beta1.types.Overlay.AnimationEnd): - End previous animation. - """ - - animation_static = proto.Field( - proto.MESSAGE, - number=1, - oneof='animation_type', - message='Overlay.AnimationStatic', - ) - animation_fade = proto.Field( - proto.MESSAGE, - number=2, - oneof='animation_type', - message='Overlay.AnimationFade', - ) - animation_end = proto.Field( - proto.MESSAGE, - number=3, - oneof='animation_type', - message='Overlay.AnimationEnd', - ) - - image = proto.Field( - proto.MESSAGE, - number=1, - message=Image, - ) - animations = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Animation, - ) - - -class PreprocessingConfig(proto.Message): - r"""Preprocessing configurations. - Attributes: - color (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Color): - Color preprocessing configuration. - denoise (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Denoise): - Denoise preprocessing configuration. - deblock (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Deblock): - Deblock preprocessing configuration. - audio (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Audio): - Audio preprocessing configuration. - crop (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Crop): - Specify the video cropping configuration. - pad (google.cloud.video.transcoder_v1beta1.types.PreprocessingConfig.Pad): - Specify the video pad filter configuration. 
- """ - - class Color(proto.Message): - r"""Color preprocessing configuration. - Attributes: - saturation (float): - Control color saturation of the video. Enter - a value between -1 and 1, where -1 is fully - desaturated and 1 is maximum saturation. 0 is no - change. The default is 0. - contrast (float): - Control black and white contrast of the - video. Enter a value between -1 and 1, where -1 - is minimum contrast and 1 is maximum contrast. 0 - is no change. The default is 0. - brightness (float): - Control brightness of the video. Enter a - value between -1 and 1, where -1 is minimum - brightness and 1 is maximum brightness. 0 is no - change. The default is 0. - """ - - saturation = proto.Field( - proto.DOUBLE, - number=1, - ) - contrast = proto.Field( - proto.DOUBLE, - number=2, - ) - brightness = proto.Field( - proto.DOUBLE, - number=3, - ) - - class Denoise(proto.Message): - r"""Denoise preprocessing configuration. - Attributes: - strength (float): - Set strength of the denoise. Enter a value - between 0 and 1. The higher the value, the - smoother the image. 0 is no denoising. The - default is 0. - tune (str): - Set the denoiser mode. The default is ``"standard"``. - - Supported denoiser modes: - - - 'standard' - - 'grain' - """ - - strength = proto.Field( - proto.DOUBLE, - number=1, - ) - tune = proto.Field( - proto.STRING, - number=2, - ) - - class Deblock(proto.Message): - r"""Deblock preprocessing configuration. - Attributes: - strength (float): - Set strength of the deblocker. Enter a value - between 0 and 1. The higher the value, the - stronger the block removal. 0 is no deblocking. - The default is 0. - enabled (bool): - Enable deblocker. The default is ``false``. - """ - - strength = proto.Field( - proto.DOUBLE, - number=1, - ) - enabled = proto.Field( - proto.BOOL, - number=2, - ) - - class Audio(proto.Message): - r"""Audio preprocessing configuration. 
- Attributes: - lufs (float): - Specify audio loudness normalization in loudness units - relative to full scale (LUFS). Enter a value between -24 and - 0 (the default), where: - - - -24 is the Advanced Television Systems Committee (ATSC - A/85) standard - - -23 is the EU R128 broadcast standard - - -19 is the prior standard for online mono audio - - -18 is the ReplayGain standard - - -16 is the prior standard for stereo audio - - -14 is the new online audio standard recommended by - Spotify, as well as Amazon Echo - - 0 disables normalization - high_boost (bool): - Enable boosting high frequency components. The default is - ``false``. - low_boost (bool): - Enable boosting low frequency components. The default is - ``false``. - """ - - lufs = proto.Field( - proto.DOUBLE, - number=1, - ) - high_boost = proto.Field( - proto.BOOL, - number=2, - ) - low_boost = proto.Field( - proto.BOOL, - number=3, - ) - - class Crop(proto.Message): - r"""Video cropping configuration for the input video. The cropped - input video is scaled to match the output resolution. - - Attributes: - top_pixels (int): - The number of pixels to crop from the top. - The default is 0. - bottom_pixels (int): - The number of pixels to crop from the bottom. - The default is 0. - left_pixels (int): - The number of pixels to crop from the left. - The default is 0. - right_pixels (int): - The number of pixels to crop from the right. - The default is 0. - """ - - top_pixels = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels = proto.Field( - proto.INT32, - number=2, - ) - left_pixels = proto.Field( - proto.INT32, - number=3, - ) - right_pixels = proto.Field( - proto.INT32, - number=4, - ) - - class Pad(proto.Message): - r"""Pad filter configuration for the input video. The padded - input video is scaled after padding with black to match the - output resolution. - - Attributes: - top_pixels (int): - The number of pixels to add to the top. The - default is 0. 
- bottom_pixels (int): - The number of pixels to add to the bottom. - The default is 0. - left_pixels (int): - The number of pixels to add to the left. The - default is 0. - right_pixels (int): - The number of pixels to add to the right. The - default is 0. - """ - - top_pixels = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels = proto.Field( - proto.INT32, - number=2, - ) - left_pixels = proto.Field( - proto.INT32, - number=3, - ) - right_pixels = proto.Field( - proto.INT32, - number=4, - ) - - color = proto.Field( - proto.MESSAGE, - number=1, - message=Color, - ) - denoise = proto.Field( - proto.MESSAGE, - number=2, - message=Denoise, - ) - deblock = proto.Field( - proto.MESSAGE, - number=3, - message=Deblock, - ) - audio = proto.Field( - proto.MESSAGE, - number=4, - message=Audio, - ) - crop = proto.Field( - proto.MESSAGE, - number=5, - message=Crop, - ) - pad = proto.Field( - proto.MESSAGE, - number=6, - message=Pad, - ) - - -class VideoStream(proto.Message): - r"""Video stream resource. - Attributes: - codec (str): - Codec type. The following codecs are supported: - - - ``h264`` (default) - - ``h265`` - - ``vp9`` - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - ``baseline`` - - ``main`` - - ``high`` (default) - - The available options are FFmpeg-compatible. Note that - certain values for this field may cause the transcoder to - override other fields you set in the ``VideoStream`` - message. - tune (str): - Enforces the specified codec tune. The available options are - FFmpeg-compatible. Note that certain values for this field - may cause the transcoder to override other fields you set in - the ``VideoStream`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. The available options are FFmpeg-compatible. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``VideoStream`` message. 
- height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - pixel_format (str): - Pixel format to use. The default is ``"yuv420p"``. - - Supported pixel formats: - - - 'yuv420p' pixel format. - - 'yuv422p' pixel format. - - 'yuv444p' pixel format. - - 'yuv420p10' 10-bit HDR pixel format. - - 'yuv422p10' 10-bit HDR pixel format. - - 'yuv444p10' 10-bit HDR pixel format. - - 'yuv420p12' 12-bit HDR pixel format. - - 'yuv422p12' 12-bit HDR pixel format. - - 'yuv444p12' 12-bit HDR pixel format. - bitrate_bps (int): - Required. The video bitrate in bits per - second. The minimum value is 1,000. The maximum - value for H264/H265 is 800,000,000. The maximum - value for VP9 is 480,000,000. - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``"vbr"``. - - Supported rate control modes: - - - 'vbr' - variable bitrate - - 'crf' - constant rate factor - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be - ``"vbr"``. The default is ``false``. - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. - Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. 
- allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``"3s"``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - entropy_coder (str): - The entropy coder to use. The default is ``"cabac"``. - - Supported entropy coders: - - - 'cavlc' - - 'cabac' - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculate frame - rate `__ - for more information. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. 
- """ - - codec = proto.Field( - proto.STRING, - number=1, - ) - profile = proto.Field( - proto.STRING, - number=2, - ) - tune = proto.Field( - proto.STRING, - number=3, - ) - preset = proto.Field( - proto.STRING, - number=4, - ) - height_pixels = proto.Field( - proto.INT32, - number=5, - ) - width_pixels = proto.Field( - proto.INT32, - number=6, - ) - pixel_format = proto.Field( - proto.STRING, - number=7, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=8, - ) - rate_control_mode = proto.Field( - proto.STRING, - number=9, - ) - enable_two_pass = proto.Field( - proto.BOOL, - number=10, - ) - crf_level = proto.Field( - proto.INT32, - number=11, - ) - vbv_size_bits = proto.Field( - proto.INT32, - number=12, - ) - vbv_fullness_bits = proto.Field( - proto.INT32, - number=13, - ) - allow_open_gop = proto.Field( - proto.BOOL, - number=14, - ) - gop_frame_count = proto.Field( - proto.INT32, - number=15, - oneof='gop_mode', - ) - gop_duration = proto.Field( - proto.MESSAGE, - number=16, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - entropy_coder = proto.Field( - proto.STRING, - number=17, - ) - b_pyramid = proto.Field( - proto.BOOL, - number=18, - ) - b_frame_count = proto.Field( - proto.INT32, - number=19, - ) - frame_rate = proto.Field( - proto.DOUBLE, - number=20, - ) - aq_strength = proto.Field( - proto.DOUBLE, - number=21, - ) - - -class AudioStream(proto.Message): - r"""Audio stream resource. - Attributes: - codec (str): - The codec for this audio stream. The default is ``"aac"``. - - Supported audio codecs: - - - 'aac' - - 'aac-he' - - 'aac-he-v2' - - 'mp3' - - 'ac3' - - 'eac3' - bitrate_bps (int): - Required. Audio bitrate in bits per second. - Must be between 1 and 10,000,000. - channel_count (int): - Number of audio channels. Must be between 1 - and 6. The default is 2. - channel_layout (Sequence[str]): - A list of channel names specifying layout of the audio - channels. 
This only affects the metadata embedded in the - container headers, if supported by the specified format. The - default is ``["fl", "fr"]``. - - Supported channel names: - - - 'fl' - Front left channel - - 'fr' - Front right channel - - 'sl' - Side left channel - - 'sr' - Side right channel - - 'fc' - Front center channel - - 'lfe' - Low frequency - mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom]): - The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - sample_rate_hertz (int): - The audio sample rate in Hertz. The default - is 48000 Hertz. - """ - - class AudioAtom(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - - Attributes: - key (str): - Required. The ``EditAtom.key`` that references the atom with - audio inputs in the ``Job.edit_list``. - channels (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel]): - List of ``Channel``\ s for this audio stream. for in-depth - explanation. - """ - - class AudioChannel(proto.Message): - r"""The audio channel. - Attributes: - inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.AudioStream.AudioAtom.AudioChannel.AudioChannelInput]): - List of ``Job.inputs`` for this audio channel. - """ - - class AudioChannelInput(proto.Message): - r"""Identifies which input file, track, and channel should be - used. - - Attributes: - key (str): - Required. The ``Input.key`` that identifies the input file. - track (int): - Required. The zero-based index of the track - in the input file. - channel (int): - Required. The zero-based index of the channel - in the input file. - gain_db (float): - Audio volume control in dB. Negative values - decrease volume, positive values increase. The - default is 0. 
- """ - - key = proto.Field( - proto.STRING, - number=1, - ) - track = proto.Field( - proto.INT32, - number=2, - ) - channel = proto.Field( - proto.INT32, - number=3, - ) - gain_db = proto.Field( - proto.DOUBLE, - number=4, - ) - - inputs = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='AudioStream.AudioAtom.AudioChannel.AudioChannelInput', - ) - - key = proto.Field( - proto.STRING, - number=1, - ) - channels = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='AudioStream.AudioAtom.AudioChannel', - ) - - codec = proto.Field( - proto.STRING, - number=1, - ) - bitrate_bps = proto.Field( - proto.INT32, - number=2, - ) - channel_count = proto.Field( - proto.INT32, - number=3, - ) - channel_layout = proto.RepeatedField( - proto.STRING, - number=4, - ) - mapping = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=AudioAtom, - ) - sample_rate_hertz = proto.Field( - proto.INT32, - number=6, - ) - - -class TextStream(proto.Message): - r"""Encoding of a text stream. For example, closed captions or - subtitles. - - Attributes: - codec (str): - The codec for this text stream. The default is ``"webvtt"``. - - Supported text codecs: - - - 'srt' - - 'ttml' - - 'cea608' - - 'cea708' - - 'webvtt' - language_code (str): - Required. The BCP-47 language code, such as ``"en-US"`` or - ``"sr-Latn"``. For more information, see - https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. - mapping (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom]): - The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - """ - - class TextAtom(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - - Attributes: - key (str): - Required. The ``EditAtom.key`` that references atom with - text inputs in the ``Job.edit_list``. 
- inputs (Sequence[google.cloud.video.transcoder_v1beta1.types.TextStream.TextAtom.TextInput]): - List of ``Job.inputs`` that should be embedded in this atom. - Only one input is supported. - """ - - class TextInput(proto.Message): - r"""Identifies which input file and track should be used. - Attributes: - key (str): - Required. The ``Input.key`` that identifies the input file. - track (int): - Required. The zero-based index of the track - in the input file. - """ - - key = proto.Field( - proto.STRING, - number=1, - ) - track = proto.Field( - proto.INT32, - number=2, - ) - - key = proto.Field( - proto.STRING, - number=1, - ) - inputs = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='TextStream.TextAtom.TextInput', - ) - - codec = proto.Field( - proto.STRING, - number=1, - ) - language_code = proto.Field( - proto.STRING, - number=2, - ) - mapping = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=TextAtom, - ) - - -class SegmentSettings(proto.Message): - r"""Segment settings for ``"ts"``, ``"fmp4"`` and ``"vtt"``. - Attributes: - segment_duration (google.protobuf.duration_pb2.Duration): - Duration of the segments in seconds. The default is - ``"6.0s"``. Note that ``segmentDuration`` must be greater - than or equal to ```gopDuration`` <#videostream>`__, and - ``segmentDuration`` must be divisible by - ```gopDuration`` <#videostream>`__. - individual_segments (bool): - Required. Create an individual segment file. The default is - ``false``. - """ - - segment_duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - individual_segments = proto.Field( - proto.BOOL, - number=3, - ) - - -class Encryption(proto.Message): - r"""Encryption settings. - Attributes: - key (str): - Required. 128 bit encryption key represented - as lowercase hexadecimal digits. - iv (str): - Required. 128 bit Initialization Vector (IV) - represented as lowercase hexadecimal digits. 
- aes_128 (google.cloud.video.transcoder_v1beta1.types.Encryption.Aes128Encryption): - Configuration for AES-128 encryption. - sample_aes (google.cloud.video.transcoder_v1beta1.types.Encryption.SampleAesEncryption): - Configuration for SAMPLE-AES encryption. - mpeg_cenc (google.cloud.video.transcoder_v1beta1.types.Encryption.MpegCommonEncryption): - Configuration for MPEG Common Encryption - (MPEG-CENC). - """ - - class Aes128Encryption(proto.Message): - r"""Configuration for AES-128 encryption. - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. - """ - - key_uri = proto.Field( - proto.STRING, - number=1, - ) - - class SampleAesEncryption(proto.Message): - r"""Configuration for SAMPLE-AES encryption. - Attributes: - key_uri (str): - Required. URI of the key delivery service. - This URI is inserted into the M3U8 header. - """ - - key_uri = proto.Field( - proto.STRING, - number=1, - ) - - class MpegCommonEncryption(proto.Message): - r"""Configuration for MPEG Common Encryption (MPEG-CENC). - Attributes: - key_id (str): - Required. 128 bit Key ID represented as - lowercase hexadecimal digits for use with common - encryption. - scheme (str): - Required. Specify the encryption scheme. 
- Supported encryption schemes: - - 'cenc' - - 'cbcs' - """ - - key_id = proto.Field( - proto.STRING, - number=1, - ) - scheme = proto.Field( - proto.STRING, - number=2, - ) - - key = proto.Field( - proto.STRING, - number=1, - ) - iv = proto.Field( - proto.STRING, - number=2, - ) - aes_128 = proto.Field( - proto.MESSAGE, - number=3, - oneof='encryption_mode', - message=Aes128Encryption, - ) - sample_aes = proto.Field( - proto.MESSAGE, - number=4, - oneof='encryption_mode', - message=SampleAesEncryption, - ) - mpeg_cenc = proto.Field( - proto.MESSAGE, - number=5, - oneof='encryption_mode', - message=MpegCommonEncryption, - ) - - -class Progress(proto.Message): - r"""Estimated fractional progress for each step, from ``0`` to ``1``. - Attributes: - analyzed (float): - Estimated fractional progress for ``analyzing`` step. - encoded (float): - Estimated fractional progress for ``encoding`` step. - uploaded (float): - Estimated fractional progress for ``uploading`` step. - notified (float): - Estimated fractional progress for ``notifying`` step. - """ - - analyzed = proto.Field( - proto.DOUBLE, - number=1, - ) - encoded = proto.Field( - proto.DOUBLE, - number=2, - ) - uploaded = proto.Field( - proto.DOUBLE, - number=3, - ) - notified = proto.Field( - proto.DOUBLE, - number=4, - ) - - -class FailureDetail(proto.Message): - r"""Additional information about the reasons for the failure. - Attributes: - description (str): - A description of the failure. 
- """ - - description = proto.Field( - proto.STRING, - number=1, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py b/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py deleted file mode 100644 index 937818b..0000000 --- a/owl-bot-staging/v1beta1/google/cloud/video/transcoder_v1beta1/types/services.py +++ /dev/null @@ -1,256 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import proto # type: ignore - -from google.cloud.video.transcoder_v1beta1.types import resources - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1beta1', - manifest={ - 'CreateJobRequest', - 'ListJobsRequest', - 'GetJobRequest', - 'DeleteJobRequest', - 'ListJobsResponse', - 'CreateJobTemplateRequest', - 'ListJobTemplatesRequest', - 'GetJobTemplateRequest', - 'DeleteJobTemplateRequest', - 'ListJobTemplatesResponse', - }, -) - - -class CreateJobRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJob``. - Attributes: - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - job (google.cloud.video.transcoder_v1beta1.types.Job): - Required. Parameters for creating transcoding - job. 
- """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - job = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Job, - ) - - -class ListJobsRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobs``. The parent - location from which to retrieve the collection of jobs. - - Attributes: - parent (str): - Required. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class GetJobRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJob``. - Attributes: - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJob``. - Attributes: - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListJobsResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobs``. - Attributes: - jobs (Sequence[google.cloud.video.transcoder_v1beta1.types.Job]): - List of jobs in the specified region. - next_page_token (str): - The pagination token. - """ - - @property - def raw_page(self): - return self - - jobs = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Job, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -class CreateJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJobTemplate``. - Attributes: - parent (str): - Required. 
The parent location to create this job template. - Format: ``projects/{project}/locations/{location}`` - job_template (google.cloud.video.transcoder_v1beta1.types.JobTemplate): - Required. Parameters for creating job - template. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's resource - name. - - This value should be 4-63 characters, and valid characters - must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - job_template = proto.Field( - proto.MESSAGE, - number=2, - message=resources.JobTemplate, - ) - job_template_id = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobTemplatesRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobTemplates``. - Attributes: - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - """ - - parent = proto.Field( - proto.STRING, - number=1, - ) - page_size = proto.Field( - proto.INT32, - number=2, - ) - page_token = proto.Field( - proto.STRING, - number=3, - ) - - -class GetJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJobTemplate``. - Attributes: - name (str): - Required. The name of the job template to retrieve. Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJobTemplate``. - Attributes: - name (str): - Required. The name of the job template to delete. 
- ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name = proto.Field( - proto.STRING, - number=1, - ) - - -class ListJobTemplatesResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobTemplates``. - Attributes: - job_templates (Sequence[google.cloud.video.transcoder_v1beta1.types.JobTemplate]): - List of job templates in the specified - region. - next_page_token (str): - The pagination token. - """ - - @property - def raw_page(self): - return self - - job_templates = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.JobTemplate, - ) - next_page_token = proto.Field( - proto.STRING, - number=2, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1beta1/mypy.ini b/owl-bot-staging/v1beta1/mypy.ini deleted file mode 100644 index 4505b48..0000000 --- a/owl-bot-staging/v1beta1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.6 -namespace_packages = True diff --git a/owl-bot-staging/v1beta1/noxfile.py b/owl-bot-staging/v1beta1/noxfile.py deleted file mode 100644 index 4ddc2d2..0000000 --- a/owl-bot-staging/v1beta1/noxfile.py +++ /dev/null @@ -1,132 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", -] - -@nox.session(python=['3.6', '3.7', '3.8', '3.9']) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/video/transcoder_v1beta1/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python='3.7') -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. 
- """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=['3.6', '3.7']) -def mypy(session): - """Run the type checker.""" - session.install('mypy', 'types-pkg_resources') - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python='3.6') -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx<3.0.0", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) diff --git a/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py b/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py deleted file mode 100644 index 700007b..0000000 --- a/owl-bot-staging/v1beta1/scripts/fixup_transcoder_v1beta1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! 
/usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class transcoderCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_job': ('parent', 'job', ), - 'create_job_template': ('parent', 'job_template', 'job_template_id', ), - 'delete_job': ('name', ), - 'delete_job_template': ('name', ), - 'get_job': ('name', ), - 'get_job_template': ('name', ), - 'list_jobs': ('parent', 'page_size', 'page_token', ), - 'list_job_templates': ('parent', 'page_size', 'page_token', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. 
- # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: not a.keyword.value in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=transcoderCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. - tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. 
- with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the transcoder client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1beta1/setup.py b/owl-bot-staging/v1beta1/setup.py deleted file mode 100644 index 8f17241..0000000 --- a/owl-bot-staging/v1beta1/setup.py +++ /dev/null @@ -1,54 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache 
License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import io -import os -import setuptools # type: ignore - -version = '0.1.0' - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, 'README.rst') -with io.open(readme_filename, encoding='utf-8') as readme_file: - readme = readme_file.read() - -setuptools.setup( - name='google-cloud-video-transcoder', - version=version, - long_description=readme, - packages=setuptools.PEP420PackageFinder.find(), - namespace_packages=('google', 'google.cloud', 'google.cloud.video'), - platforms='Posix; MacOS X; Windows', - include_package_data=True, - install_requires=( - 'google-api-core[grpc] >= 1.27.0, < 3.0.0dev', - 'libcst >= 0.2.5', - 'proto-plus >= 1.15.0', - 'packaging >= 14.3', ), - python_requires='>=3.6', - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Operating System :: OS Independent', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Topic :: Internet', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], - zip_safe=False, -) diff --git a/owl-bot-staging/v1beta1/tests/__init__.py b/owl-bot-staging/v1beta1/tests/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1beta1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed 
under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta1/tests/unit/__init__.py b/owl-bot-staging/v1beta1/tests/unit/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1beta1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1beta1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py deleted file mode 100644 index b54a5fc..0000000 --- a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py deleted file mode 100644 index 9851ffb..0000000 --- a/owl-bot-staging/v1beta1/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ /dev/null @@ -1,3243 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import os -import mock -import packaging.version - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule - - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import TranscoderServiceAsyncClient -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import TranscoderServiceClient -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports -from google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.base import _GOOGLE_AUTH_VERSION -from google.cloud.video.transcoder_v1beta1.types import resources -from google.cloud.video.transcoder_v1beta1.types import services -from google.oauth2 import service_account -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -import google.auth - - -# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively -# through google-api-core: -# - Delete the auth "less than" test cases -# - Delete these pytest markers (Make the "greater than or equal to" tests 
the default). -requires_google_auth_lt_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"), - reason="This test requires google-auth < 1.25.0", -) -requires_google_auth_gte_1_25_0 = pytest.mark.skipif( - packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"), - reason="This test requires google-auth >= 1.25.0", -) - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. -def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None - assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class", [ - TranscoderServiceClient, - TranscoderServiceAsyncClient, -]) -def test_transcoder_service_client_from_service_account_info(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 
'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'transcoder.googleapis.com:443' - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.TranscoderServiceGrpcTransport, "grpc"), - (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class", [ - TranscoderServiceClient, - TranscoderServiceAsyncClient, -]) -def test_transcoder_service_client_from_service_account_file(client_class): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json") - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == 'transcoder.googleapis.com:443' - - -def test_transcoder_service_client_get_transport_class(): - 
transport = TranscoderServiceClient.get_transport_class() - available_transports = [ - transports.TranscoderServiceGrpcTransport, - ] - assert transport in available_transports - - transport = TranscoderServiceClient.get_transport_class("grpc") - assert transport == transports.TranscoderServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. 
- options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class() - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class() - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - # Check the case client_cert_source and ADC client cert 
are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class() - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. - options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name): - # Check the case credentials file is provided. 
- options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_transcoder_service_client_client_options_from_dict(): - with mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = TranscoderServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - ) - - -def test_create_job(transport: str = 'grpc', request_type=services.CreateJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason='failure_reason_value', - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == 'failure_reason_value' - assert response.ttl_after_completion_days == 2670 - - -def test_create_job_from_dict(): - test_create_job(request_type=dict) - - -def test_create_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - -@pytest.mark.asyncio -async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason='failure_reason_value', - ttl_after_completion_days=2670, - )) - response = await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == 'failure_reason_value' - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - - -def test_create_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = resources.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job == resources.Job(name='name_value') - - -def test_create_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - -@pytest.mark.asyncio -async def test_create_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job == resources.Job(name='name_value') - - -@pytest.mark.asyncio -async def test_create_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - -def test_list_jobs(transport: str = 'grpc', request_type=services.ListJobsRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse( - next_page_token='next_page_token_value', - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_jobs_from_dict(): - test_list_jobs(request_type=dict) - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = services.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_jobs_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_jobs_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.Job) - for i in results) - -def test_list_jobs_pages(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Job) - for i in responses) - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_jobs(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_job(transport: str = 'grpc', request_type=services.GetJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason='failure_reason_value', - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == 'failure_reason_value' - assert response.ttl_after_completion_days == 2670 - - -def test_get_job_from_dict(): - test_get_job(request_type=dict) - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - priority=898, - state=resources.Job.ProcessingState.PENDING, - failure_reason='failure_reason_value', - ttl_after_completion_days=2670, - )) - response = await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.priority == 898 - assert response.state == resources.Job.ProcessingState.PENDING - assert response.failure_reason == 'failure_reason_value' - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - - -def test_get_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = resources.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - services.GetJobRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.get_job( - services.GetJobRequest(), - name='name_value', - ) - - -def test_delete_job(transport: str = 'grpc', request_type=services.DeleteJobRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_from_dict(): - test_delete_job(request_type=dict) - - -def test_delete_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - -@pytest.mark.asyncio -async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - - -def test_delete_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = None - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - - -def test_create_job_template(transport: str = 'grpc', request_type=services.CreateJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_create_job_template_from_dict(): - test_create_job_template(request_type=dict) - - -def test_create_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - client.create_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_job_template_async_from_dict(): - await test_create_job_template_async(request_type=dict) - - -def test_create_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_create_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_template == resources.JobTemplate(name='name_value') - assert args[0].job_template_id == 'job_template_id_value' - - -def test_create_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - assert args[0].job_template == resources.JobTemplate(name='name_value') - assert args[0].job_template_id == 'job_template_id_value' - - -@pytest.mark.asyncio -async def test_create_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - -def test_list_job_templates(transport: str = 'grpc', request_type=services.ListJobTemplatesRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - ) - response = client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - - -def test_list_job_templates_from_dict(): - test_list_job_templates(request_type=dict) - - -def test_list_job_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - client.list_job_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - -@pytest.mark.asyncio -async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - )) - response = await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - - -@pytest.mark.asyncio -async def test_list_job_templates_async_from_dict(): - await test_list_job_templates_async(request_type=dict) - - -def test_list_job_templates_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = services.ListJobTemplatesResponse() - client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_templates_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent/value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent/value', - ) in kw['metadata'] - - -def test_list_job_templates_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -def test_list_job_templates_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].parent == 'parent_value' - - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_job_templates_pager(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_templates(request={}) - - assert pager._metadata == metadata - - results = [i for i in pager] - assert len(results) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in results) - -def test_list_job_templates_pages(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_templates_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in responses) - -@pytest.mark.asyncio -async def test_list_job_templates_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_templates(request={})).pages: - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -def test_get_job_template(transport: str = 'grpc', request_type=services.GetJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_get_job_template_from_dict(): - test_get_job_template(request_type=dict) - - -def test_get_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - client.get_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_template_async_from_dict(): - await test_get_job_template_async(request_type=dict) - - -def test_get_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_get_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_get_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- response = await client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - - -def test_delete_job_template(transport: str = 'grpc', request_type=services.DeleteJobTemplateRequest): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_template_from_dict(): - test_delete_job_template(request_type=dict) - - -def test_delete_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - client.delete_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - -@pytest.mark.asyncio -async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_template_async_from_dict(): - await test_delete_job_template_async(request_type=dict) - - -def test_delete_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. 
- request = services.DeleteJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = None - client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = 'name/value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name/value', - ) in kw['metadata'] - - -def test_delete_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. 
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -def test_delete_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0].name == 'name_value' - - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - await client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide scopes and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TranscoderServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. 
- transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.TranscoderServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.TranscoderServiceGrpcTransport, - ) - -def test_transcoder_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_transcoder_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_job', - 'list_jobs', - 'get_job', - 'delete_job', - 'create_job_template', - 'list_job_templates', - 'get_job_template', - 'delete_job_template', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_base_transport_with_credentials_file_old_google_auth(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", scopes=( - 'https://www.googleapis.com/auth/cloud-platform', - ), - quota_project_id="octopus", - ) - - -def 
test_transcoder_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport() - adc.assert_called_once() - - -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_auth_adc_old_google_auth(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_gte_1_25_0 -def test_transcoder_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -@requires_google_auth_lt_1_25_0 -def test_transcoder_service_transport_auth_adc_old_google_auth(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus") - adc.assert_called_once_with(scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TranscoderServiceGrpcTransport, grpc_helpers), - (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -def test_transcoder_service_host_no_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'), - ) - assert client.transport._host == 'transcoder.googleapis.com:443' - - -def test_transcoder_service_host_with_port(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'), - ) - assert client.transport._host == 'transcoder.googleapis.com:8000' - -def test_transcoder_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TranscoderServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_transcoder_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. 
- transport = transports.TranscoderServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed 
from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = TranscoderServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = TranscoderServiceClient.job_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_job_path(path) - assert expected == actual - -def test_job_template_path(): - project = "cuttlefish" - location = "mussel" - job_template = "winkle" - expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - actual = TranscoderServiceClient.job_template_path(project, location, job_template) - assert expected == actual - - -def test_parse_job_template_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "job_template": "abalone", - } - path = TranscoderServiceClient.job_template_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_job_template_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = TranscoderServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = TranscoderServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = TranscoderServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = TranscoderServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = TranscoderServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = TranscoderServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = TranscoderServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = TranscoderServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = TranscoderServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = TranscoderServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_withDEFAULT_CLIENT_INFO(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = TranscoderServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py index af6112f..bece6f5 100644 --- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py @@ -531,6 +531,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ priority=898, state=resources.Job.ProcessingState.PENDING, failure_reason="failure_reason_value", + ttl_after_completion_days=2670, template_id="template_id_value", ) response = client.create_job(request) @@ -548,6 +549,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ assert response.priority == 898 assert response.state == resources.Job.ProcessingState.PENDING assert response.failure_reason == "failure_reason_value" + assert response.ttl_after_completion_days == 2670 def test_create_job_from_dict(): @@ -592,6 +594,7 @@ async def test_create_job_async( priority=898, state=resources.Job.ProcessingState.PENDING, failure_reason="failure_reason_value", + ttl_after_completion_days=2670, ) ) response = await client.create_job(request) @@ -609,6 +612,7 @@ async 
def test_create_job_async( assert response.priority == 898 assert response.state == resources.Job.ProcessingState.PENDING assert response.failure_reason == "failure_reason_value" + assert response.ttl_after_completion_days == 2670 @pytest.mark.asyncio @@ -1074,6 +1078,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): priority=898, state=resources.Job.ProcessingState.PENDING, failure_reason="failure_reason_value", + ttl_after_completion_days=2670, template_id="template_id_value", ) response = client.get_job(request) @@ -1091,6 +1096,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest): assert response.priority == 898 assert response.state == resources.Job.ProcessingState.PENDING assert response.failure_reason == "failure_reason_value" + assert response.ttl_after_completion_days == 2670 def test_get_job_from_dict(): @@ -1135,6 +1141,7 @@ async def test_get_job_async( priority=898, state=resources.Job.ProcessingState.PENDING, failure_reason="failure_reason_value", + ttl_after_completion_days=2670, ) ) response = await client.get_job(request) @@ -1152,6 +1159,7 @@ async def test_get_job_async( assert response.priority == 898 assert response.state == resources.Job.ProcessingState.PENDING assert response.failure_reason == "failure_reason_value" + assert response.ttl_after_completion_days == 2670 @pytest.mark.asyncio