From e09605b90882c63116a2b248abca0bbef4ed27f4 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 9 Jan 2023 22:55:53 +0000 Subject: [PATCH 1/2] feat: Add support for python 3.11 chore: Update gapic-generator-python to v1.8.0 PiperOrigin-RevId: 500768693 Source-Link: https://github.com/googleapis/googleapis/commit/190b612e3d0ff8f025875a669e5d68a1446d43c1 Source-Link: https://github.com/googleapis/googleapis-gen/commit/7bf29a414b9ecac3170f0b65bdc2a95705c0ef1a Copy-Tag: eyJwIjoiLmdpdGh1Yi8uT3dsQm90LnlhbWwiLCJoIjoiN2JmMjlhNDE0YjllY2FjMzE3MGYwYjY1YmRjMmE5NTcwNWMwZWYxYSJ9 --- owl-bot-staging/v1/.coveragerc | 12 + owl-bot-staging/v1/.flake8 | 33 + owl-bot-staging/v1/MANIFEST.in | 2 + owl-bot-staging/v1/README.rst | 49 + owl-bot-staging/v1/docs/conf.py | 376 ++ owl-bot-staging/v1/docs/index.rst | 7 + .../v1/docs/transcoder_v1/services.rst | 6 + .../docs/transcoder_v1/transcoder_service.rst | 10 + .../v1/docs/transcoder_v1/types.rst | 6 + .../google/cloud/video/transcoder/__init__.py | 83 + .../cloud/video/transcoder/gapic_version.py | 16 + .../v1/google/cloud/video/transcoder/py.typed | 2 + .../cloud/video/transcoder_v1/__init__.py | 84 + .../video/transcoder_v1/gapic_metadata.json | 103 + .../video/transcoder_v1/gapic_version.py | 16 + .../google/cloud/video/transcoder_v1/py.typed | 2 + .../video/transcoder_v1/services/__init__.py | 15 + .../services/transcoder_service/__init__.py | 22 + .../transcoder_service/async_client.py | 1054 +++++ .../services/transcoder_service/client.py | 1265 ++++++ .../services/transcoder_service/pagers.py | 261 ++ .../transcoder_service/transports/__init__.py | 33 + .../transcoder_service/transports/base.py | 248 ++ .../transcoder_service/transports/grpc.py | 454 +++ .../transports/grpc_asyncio.py | 453 +++ .../video/transcoder_v1/types/__init__.py | 78 + .../video/transcoder_v1/types/resources.py | 2172 ++++++++++ .../video/transcoder_v1/types/services.py | 325 ++ owl-bot-staging/v1/mypy.ini | 3 + owl-bot-staging/v1/noxfile.py | 184 + ...data_google.cloud.video.transcoder.v1.json | 1315 +++++++ ...ted_transcoder_service_create_job_async.py | 56 + ...ated_transcoder_service_create_job_sync.py | 56 + ...coder_service_create_job_template_async.py | 53 + ...scoder_service_create_job_template_sync.py | 53 + ...ted_transcoder_service_delete_job_async.py | 50 + ...ated_transcoder_service_delete_job_sync.py | 50 + ...coder_service_delete_job_template_async.py | 50 + ...scoder_service_delete_job_template_sync.py | 50 + ...erated_transcoder_service_get_job_async.py | 52 + ...nerated_transcoder_service_get_job_sync.py | 52 + ...anscoder_service_get_job_template_async.py | 52 + ...ranscoder_service_get_job_template_sync.py | 52 + ...scoder_service_list_job_templates_async.py | 53 + ...nscoder_service_list_job_templates_sync.py | 53 + ...ated_transcoder_service_list_jobs_async.py | 53 + ...rated_transcoder_service_list_jobs_sync.py | 53 + .../scripts/fixup_transcoder_v1_keywords.py | 183 + owl-bot-staging/v1/setup.py | 92 + .../v1/testing/constraints-3.10.txt | 6 + .../v1/testing/constraints-3.11.txt | 6 + .../v1/testing/constraints-3.12.txt | 6 + .../v1/testing/constraints-3.7.txt | 9 + .../v1/testing/constraints-3.8.txt | 6 + .../v1/testing/constraints-3.9.txt | 6 + owl-bot-staging/v1/tests/__init__.py | 16 + owl-bot-staging/v1/tests/unit/__init__.py | 16 + .../v1/tests/unit/gapic/__init__.py | 16 + .../unit/gapic/transcoder_v1/__init__.py | 16 + .../transcoder_v1/test_transcoder_service.py | 3484 +++++++++++++++++ 60 files changed, 13389 insertions(+) create mode 100644 
owl-bot-staging/v1/.coveragerc create mode 100644 owl-bot-staging/v1/.flake8 create mode 100644 owl-bot-staging/v1/MANIFEST.in create mode 100644 owl-bot-staging/v1/README.rst create mode 100644 owl-bot-staging/v1/docs/conf.py create mode 100644 owl-bot-staging/v1/docs/index.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/services.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst create mode 100644 owl-bot-staging/v1/docs/transcoder_v1/types.rst create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py create mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py create mode 100644 owl-bot-staging/v1/mypy.ini create mode 100644 owl-bot-staging/v1/noxfile.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py create mode 100644 
owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py create mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py create mode 100644 owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py create mode 100644 owl-bot-staging/v1/setup.py create mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.12.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt create mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt create mode 100644 owl-bot-staging/v1/tests/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py create mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc new file mode 100644 index 0000000..33d10d7 --- /dev/null +++ b/owl-bot-staging/v1/.coveragerc @@ -0,0 +1,12 @@ +[run] +branch = True + +[report] +show_missing = True +omit = + google/cloud/video/transcoder/__init__.py +exclude_lines = + # Re-enable the standard pragma + pragma: NO COVER + # Ignore debug-only repr + def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 new file mode 100644 index 0000000..29227d4 --- /dev/null +++ b/owl-bot-staging/v1/.flake8 @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2020 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +# Generated by synthtool. DO NOT EDIT! +[flake8] +ignore = E203, E266, E501, W503 +exclude = + # Exclude generated code. + **/proto/** + **/gapic/** + **/services/** + **/types/** + *_pb2.py + + # Standard linting exemptions. 
+ **/.nox/** + __pycache__, + .git, + *.pyc, + conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in new file mode 100644 index 0000000..da1cb61 --- /dev/null +++ b/owl-bot-staging/v1/MANIFEST.in @@ -0,0 +1,2 @@ +recursive-include google/cloud/video/transcoder *.py +recursive-include google/cloud/video/transcoder_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst new file mode 100644 index 0000000..43621a1 --- /dev/null +++ b/owl-bot-staging/v1/README.rst @@ -0,0 +1,49 @@ +Python Client for Google Cloud Video Transcoder API +==================================================== + +Quick Start +----------- + +In order to use this library, you first need to go through the following steps: + +1. `Select or create a Cloud Platform project.`_ +2. `Enable billing for your project.`_ +3. Enable the Google Cloud Video Transcoder API. +4. `Setup Authentication.`_ + +.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project +.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project +.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html + +Installation +~~~~~~~~~~~~ + +Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to +create isolated Python environments. The basic problem it addresses is one of +dependencies and versions, and indirectly permissions. + +With `virtualenv`_, it's possible to install this library without needing system +install permissions, and without clashing with the installed system +dependencies. + +.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ + + +Mac/Linux +^^^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + source <your-env>/bin/activate + <your-env>/bin/pip install /path/to/library + + +Windows +^^^^^^^ + +.. code-block:: console + + python3 -m venv <your-env> + <your-env>\Scripts\activate + <your-env>\Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py new file mode 100644 index 0000000..a906f71 --- /dev/null +++ b/owl-bot-staging/v1/docs/conf.py @@ -0,0 +1,376 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# +# google-cloud-video-transcoder documentation build configuration file +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys +import os +import shlex + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here.
+sys.path.insert(0, os.path.abspath("..")) + +__version__ = "0.1.0" + +# -- General configuration ------------------------------------------------ + +# If your documentation needs a minimal Sphinx version, state it here. +needs_sphinx = "4.0.1" + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.intersphinx", + "sphinx.ext.coverage", + "sphinx.ext.napoleon", + "sphinx.ext.todo", + "sphinx.ext.viewcode", +] + +# autodoc/autosummary flags +autoclass_content = "both" +autodoc_default_flags = ["members"] +autosummary_generate = True + + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# Allow markdown includes (so releases.md can include CHANGLEOG.md) +# http://www.sphinx-doc.org/en/master/markdown.html +source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +source_suffix = [".rst", ".md"] + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The root toctree document. +root_doc = "index" + +# General information about the project. +project = u"google-cloud-video-transcoder" +copyright = u"2022, Google, LLC" +author = u"Google APIs" # TODO: autogenerate this bit + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The full version, including alpha/beta/rc tags. +release = __version__ +# The short X.Y version. +version = ".".join(release.split(".")[0:2]) + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +exclude_patterns = ["_build"] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = "sphinx" + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built documents. +# keep_warnings = False + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# -- Options for HTML output ---------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. 
+html_theme = "alabaster" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +html_theme_options = { + "description": "Google Cloud Video Client Libraries for Python", + "github_user": "googleapis", + "github_repo": "google-cloud-python", + "github_banner": True, + "font_family": "'Roboto', Georgia, sans", + "head_font_family": "'Roboto', Georgia, serif", + "code_font_family": "'Roboto Mono', 'Consolas', monospace", +} + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] + +# Add any extra paths that contain custom files (such as robots.txt or +# .htaccess) here, relative to this directory. These files are copied +# directly to the root of the documentation. +# html_extra_path = [] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Language to be used for generating the HTML full-text search index. +# Sphinx supports the following languages: +# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' +# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' +# html_search_language = 'en' + +# A dictionary with options for the search language support, empty by default. 
+# Now only 'ja' uses this config value +# html_search_options = {'type': 'default'} + +# The name of a javascript file (relative to the configuration directory) that +# implements a search results scorer. If empty, the default will be used. +# html_search_scorer = 'scorer.js' + +# Output file base name for HTML help builder. +htmlhelp_basename = "google-cloud-video-transcoder-doc" + +# -- Options for warnings ------------------------------------------------------ + + +suppress_warnings = [ + # Temporarily suppress this to avoid "more than one target found for + # cross-reference" warning, which are intractable for us to avoid while in + # a mono-repo. + # See https://github.com/sphinx-doc/sphinx/blob + # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 + "ref.python" +] + +# -- Options for LaTeX output --------------------------------------------- + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + ( + root_doc, + "google-cloud-video-transcoder.tex", + u"google-cloud-video-transcoder Documentation", + author, + "manual", + ) +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output --------------------------------------- + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + ( + root_doc, + "google-cloud-video-transcoder", + u"Google Cloud Video Transcoder Documentation", + [author], + 1, + ) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + ( + root_doc, + "google-cloud-video-transcoder", + u"google-cloud-video-transcoder Documentation", + author, + "google-cloud-video-transcoder", + "GAPIC library for Google Cloud Video Transcoder API", + "APIs", + ) +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False + + +# Example configuration for intersphinx: refer to the Python standard library. 
+intersphinx_mapping = { + "python": ("http://python.readthedocs.org/en/latest/", None), + "gax": ("https://gax-python.readthedocs.org/en/latest/", None), + "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), + "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), + "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), + "grpc": ("https://grpc.io/grpc/python/", None), + "requests": ("http://requests.kennethreitz.org/en/stable/", None), + "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), + "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), +} + + +# Napoleon settings +napoleon_google_docstring = True +napoleon_numpy_docstring = True +napoleon_include_private_with_doc = False +napoleon_include_special_with_doc = True +napoleon_use_admonition_for_examples = False +napoleon_use_admonition_for_notes = False +napoleon_use_admonition_for_references = False +napoleon_use_ivar = False +napoleon_use_param = True +napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst new file mode 100644 index 0000000..0cfe564 --- /dev/null +++ b/owl-bot-staging/v1/docs/index.rst @@ -0,0 +1,7 @@ +API Reference +------------- +.. toctree:: + :maxdepth: 2 + + transcoder_v1/services + transcoder_v1/types diff --git a/owl-bot-staging/v1/docs/transcoder_v1/services.rst b/owl-bot-staging/v1/docs/transcoder_v1/services.rst new file mode 100644 index 0000000..1bd129e --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/services.rst @@ -0,0 +1,6 @@ +Services for Google Cloud Video Transcoder v1 API +================================================= +.. toctree:: + :maxdepth: 2 + + transcoder_service diff --git a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst new file mode 100644 index 0000000..5bf6bd8 --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst @@ -0,0 +1,10 @@ +TranscoderService +----------------------------------- + +.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service + :members: + :inherited-members: + +.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service.pagers + :members: + :inherited-members: diff --git a/owl-bot-staging/v1/docs/transcoder_v1/types.rst b/owl-bot-staging/v1/docs/transcoder_v1/types.rst new file mode 100644 index 0000000..6fe8d2e --- /dev/null +++ b/owl-bot-staging/v1/docs/transcoder_v1/types.rst @@ -0,0 +1,6 @@ +Types for Google Cloud Video Transcoder v1 API +============================================== + +.. automodule:: google.cloud.video.transcoder_v1.types + :members: + :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py new file mode 100644 index 0000000..02a7e66 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py @@ -0,0 +1,83 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from google.cloud.video.transcoder import gapic_version as package_version + +__version__ = package_version.__version__ + + +from google.cloud.video.transcoder_v1.services.transcoder_service.client import TranscoderServiceClient +from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import TranscoderServiceAsyncClient + +from google.cloud.video.transcoder_v1.types.resources import AdBreak +from google.cloud.video.transcoder_v1.types.resources import AudioStream +from google.cloud.video.transcoder_v1.types.resources import EditAtom +from google.cloud.video.transcoder_v1.types.resources import ElementaryStream +from google.cloud.video.transcoder_v1.types.resources import Input +from google.cloud.video.transcoder_v1.types.resources import Job +from google.cloud.video.transcoder_v1.types.resources import JobConfig +from google.cloud.video.transcoder_v1.types.resources import JobTemplate +from google.cloud.video.transcoder_v1.types.resources import Manifest +from google.cloud.video.transcoder_v1.types.resources import MuxStream +from google.cloud.video.transcoder_v1.types.resources import Output +from google.cloud.video.transcoder_v1.types.resources import Overlay +from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig +from google.cloud.video.transcoder_v1.types.resources import PubsubDestination +from google.cloud.video.transcoder_v1.types.resources import SegmentSettings +from google.cloud.video.transcoder_v1.types.resources import SpriteSheet +from google.cloud.video.transcoder_v1.types.resources import TextStream +from google.cloud.video.transcoder_v1.types.resources import VideoStream +from google.cloud.video.transcoder_v1.types.services import CreateJobRequest +from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest +from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import GetJobRequest +from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest +from google.cloud.video.transcoder_v1.types.services import ListJobsRequest +from google.cloud.video.transcoder_v1.types.services import ListJobsResponse +from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest +from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse + +__all__ = ('TranscoderServiceClient', + 'TranscoderServiceAsyncClient', + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py b/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed new file mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py new file mode 100644 index 0000000..95ed57b --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py @@ -0,0 +1,84 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from google.cloud.video.transcoder import gapic_version as package_version + +__version__ = package_version.__version__ + + +from .services.transcoder_service import TranscoderServiceClient +from .services.transcoder_service import TranscoderServiceAsyncClient + +from .types.resources import AdBreak +from .types.resources import AudioStream +from .types.resources import EditAtom +from .types.resources import ElementaryStream +from .types.resources import Input +from .types.resources import Job +from .types.resources import JobConfig +from .types.resources import JobTemplate +from .types.resources import Manifest +from .types.resources import MuxStream +from .types.resources import Output +from .types.resources import Overlay +from .types.resources import PreprocessingConfig +from .types.resources import PubsubDestination +from .types.resources import SegmentSettings +from .types.resources import SpriteSheet +from .types.resources import TextStream +from .types.resources import VideoStream +from .types.services import CreateJobRequest +from .types.services import CreateJobTemplateRequest +from .types.services import DeleteJobRequest +from .types.services import DeleteJobTemplateRequest +from .types.services import GetJobRequest +from .types.services import GetJobTemplateRequest +from .types.services import ListJobsRequest +from .types.services import ListJobsResponse +from .types.services import ListJobTemplatesRequest +from .types.services import ListJobTemplatesResponse + +__all__ = ( + 'TranscoderServiceAsyncClient', +'AdBreak', +'AudioStream', +'CreateJobRequest', +'CreateJobTemplateRequest', +'DeleteJobRequest', +'DeleteJobTemplateRequest', +'EditAtom', +'ElementaryStream', +'GetJobRequest', +'GetJobTemplateRequest', +'Input', +'Job', +'JobConfig', +'JobTemplate', +'ListJobTemplatesRequest', +'ListJobTemplatesResponse', +'ListJobsRequest', +'ListJobsResponse', +'Manifest', +'MuxStream', +'Output', +'Overlay', +'PreprocessingConfig', +'PubsubDestination', +'SegmentSettings', +'SpriteSheet', +'TextStream', +'TranscoderServiceClient', +'VideoStream', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json new file mode 100644 index 0000000..6651379 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json @@ -0,0 +1,103 @@ + { + "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", + "language": "python", + "libraryPackage": "google.cloud.video.transcoder_v1", + "protoPackage": "google.cloud.video.transcoder.v1", + "schema": "1.0", + "services": { + "TranscoderService": { + "clients": { + "grpc": { + "libraryClient": "TranscoderServiceClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + ] + }, + "DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + }, + "grpc-async": { + "libraryClient": "TranscoderServiceAsyncClient", + "rpcs": { + "CreateJob": { + "methods": [ + "create_job" + ] + }, + "CreateJobTemplate": { + "methods": [ + "create_job_template" + ] + }, + "DeleteJob": { + "methods": [ + "delete_job" + 
] + }, + "DeleteJobTemplate": { + "methods": [ + "delete_job_template" + ] + }, + "GetJob": { + "methods": [ + "get_job" + ] + }, + "GetJobTemplate": { + "methods": [ + "get_job_template" + ] + }, + "ListJobTemplates": { + "methods": [ + "list_job_templates" + ] + }, + "ListJobs": { + "methods": [ + "list_jobs" + ] + } + } + } + } + } + } +} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py new file mode 100644 index 0000000..405b1ce --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed new file mode 100644 index 0000000..a2716a6 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed @@ -0,0 +1,2 @@ +# Marker file for PEP 561. +# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py new file mode 100644 index 0000000..e8e1c38 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py new file mode 100644 index 0000000..a27abd8 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py @@ -0,0 +1,22 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# +from .client import TranscoderServiceClient +from .async_client import TranscoderServiceAsyncClient + +__all__ = ( + 'TranscoderServiceClient', + 'TranscoderServiceAsyncClient', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py new file mode 100644 index 0000000..2bfe811 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -0,0 +1,1054 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import functools +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union + +from google.cloud.video.transcoder_v1 import gapic_version as package_version + +from google.api_core.client_options import ClientOptions +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport +from .client import TranscoderServiceClient + + +class TranscoderServiceAsyncClient: + """Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. 
+ """ + + _client: TranscoderServiceClient + + DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT + DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT + + job_path = staticmethod(TranscoderServiceClient.job_path) + parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) + job_template_path = staticmethod(TranscoderServiceClient.job_template_path) + parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) + common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) + parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) + common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) + parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) + common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) + parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) + common_project_path = staticmethod(TranscoderServiceClient.common_project_path) + parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) + common_location_path = staticmethod(TranscoderServiceClient.common_location_path) + parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. + """ + return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceAsyncClient: The constructed client. + """ + return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore + + from_service_account_json = from_service_account_file + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS. + + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` if provided, use the provided one. 
+ (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + return TranscoderServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client instance. + """ + return self._client.transport + + get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", + client_options: Optional[ClientOptions] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transcoder service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, ~.TranscoderServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (ClientOptions): Custom options for the client. It + won't take effect if a ``transport`` instance is provided. + (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. 
+ """ + self._client = TranscoderServiceClient( + credentials=credentials, + transport=transport, + client_options=client_options, + client_info=client_info, + + ) + + async def create_job(self, + request: Optional[Union[services.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[resources.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]]): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (:class:`str`): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (:class:`google.cloud.video.transcoder_v1.types.Job`): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. 
+ metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_jobs(self, + request: Optional[Union[services.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsAsyncPager: + r"""Lists jobs in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]]): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (:class:`str`): + Required. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobsRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_jobs, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobsAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job(self, + request: Optional[Union[services.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]]): + The request object. Request message for + `TranscoderService.GetJob`. + name (:class:`str`): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + async def delete_job(self, + request: Optional[Union[services.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]]): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (:class:`str`): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def create_job_template(self, + request: Optional[Union[services.CreateJobTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_template: Optional[resources.JobTemplate] = None, + job_template_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. 
+ # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = await client.create_job_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]]): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (:class:`str`): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template (:class:`google.cloud.video.transcoder_v1.types.JobTemplate`): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (:class:`str`): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.CreateJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.create_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def list_job_templates(self, + request: Optional[Union[services.ListJobTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesAsyncPager: + r"""Lists job templates in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]]): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (:class:`str`): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: + Response message for TranscoderService.ListJobTemplates. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.ListJobTemplatesRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.list_job_templates, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__aiter__` convenience method. + response = pagers.ListJobTemplatesAsyncPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def get_job_template(self, + request: Optional[Union[services.GetJobTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_template(request=request) + + # Handle the response + print(response) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]]): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (:class:`str`): + Required. The name of the job template to retrieve. + Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.GetJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.get_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + async def delete_job_template(self, + request: Optional[Union[services.DeleteJobTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + async def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_template(request=request) + + Args: + request (Optional[Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]]): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (:class:`str`): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError("If the `request` argument is set, then none of " + "the individual field arguments should be set.") + + request = services.DeleteJobTemplateRequest(request) + + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = gapic_v1.method_async.wrap_method( + self._client._transport.delete_job_template, + default_timeout=60.0, + client_info=DEFAULT_CLIENT_INFO, + ) + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + async def __aenter__(self): + return self + + async def __aexit__(self, exc_type, exc, tb): + await self.transport.close() + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TranscoderServiceAsyncClient", +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py new file mode 100644 index 0000000..bffe916 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -0,0 +1,1265 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +import os +import re +from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast + +from google.cloud.video.transcoder_v1 import gapic_version as package_version + +from google.api_core import client_options as client_options_lib +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport import mtls # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore +from google.auth.exceptions import MutualTLSChannelError # type: ignore +from google.oauth2 import service_account # type: ignore + +try: + OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] +except AttributeError: # pragma: NO COVER + OptionalRetry = Union[retries.Retry, object] # type: ignore + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .transports.grpc import TranscoderServiceGrpcTransport +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +class TranscoderServiceClientMeta(type): + """Metaclass for the TranscoderService client. + + This provides class-level methods for building and retrieving + support objects (e.g. transport) without polluting the client instance + objects. + """ + _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] + _transport_registry["grpc"] = TranscoderServiceGrpcTransport + _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport + + def get_transport_class(cls, + label: Optional[str] = None, + ) -> Type[TranscoderServiceTransport]: + """Returns an appropriate transport class. + + Args: + label: The name of the desired transport. 
If none is + provided, then the first transport in the registry is used. + + Returns: + The transport class to use. + """ + # If a specific transport is requested, return that one. + if label: + return cls._transport_registry[label] + + # No transport is requested; return the default (that is, the first one + # in the dictionary). + return next(iter(cls._transport_registry.values())) + + +class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta): + """Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + """ + + @staticmethod + def _get_default_mtls_endpoint(api_endpoint): + """Converts api endpoint to mTLS endpoint. + + Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to + "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively. + Args: + api_endpoint (Optional[str]): the api endpoint to convert. + Returns: + str: converted mTLS api endpoint. + """ + if not api_endpoint: + return api_endpoint + + mtls_endpoint_re = re.compile( + r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?" + ) + + m = mtls_endpoint_re.match(api_endpoint) + name, mtls, sandbox, googledomain = m.groups() + if mtls or not googledomain: + return api_endpoint + + if sandbox: + return api_endpoint.replace( + "sandbox.googleapis.com", "mtls.sandbox.googleapis.com" + ) + + return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com") + + DEFAULT_ENDPOINT = "transcoder.googleapis.com" + DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore + DEFAULT_ENDPOINT + ) + + @classmethod + def from_service_account_info(cls, info: dict, *args, **kwargs): + """Creates an instance of this client using the provided credentials + info. + + Args: + info (dict): The service account private key info. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_info(info) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + @classmethod + def from_service_account_file(cls, filename: str, *args, **kwargs): + """Creates an instance of this client using the provided credentials + file. + + Args: + filename (str): The path to the service account private key json + file. + args: Additional arguments to pass to the constructor. + kwargs: Additional arguments to pass to the constructor. + + Returns: + TranscoderServiceClient: The constructed client. + """ + credentials = service_account.Credentials.from_service_account_file( + filename) + kwargs["credentials"] = credentials + return cls(*args, **kwargs) + + from_service_account_json = from_service_account_file + + @property + def transport(self) -> TranscoderServiceTransport: + """Returns the transport used by the client instance. + + Returns: + TranscoderServiceTransport: The transport used by the client + instance.
+ """ + return self._transport + + @staticmethod + def job_path(project: str,location: str,job: str,) -> str: + """Returns a fully-qualified job string.""" + return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + + @staticmethod + def parse_job_path(path: str) -> Dict[str,str]: + """Parses a job path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobs/(?P<job>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def job_template_path(project: str,location: str,job_template: str,) -> str: + """Returns a fully-qualified job_template string.""" + return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) + + @staticmethod + def parse_job_template_path(path: str) -> Dict[str,str]: + """Parses a job_template path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)/jobTemplates/(?P<job_template>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_billing_account_path(billing_account: str, ) -> str: + """Returns a fully-qualified billing_account string.""" + return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + + @staticmethod + def parse_common_billing_account_path(path: str) -> Dict[str,str]: + """Parse a billing_account path into its component segments.""" + m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_folder_path(folder: str, ) -> str: + """Returns a fully-qualified folder string.""" + return "folders/{folder}".format(folder=folder, ) + + @staticmethod + def parse_common_folder_path(path: str) -> Dict[str,str]: + """Parse a folder path into its component segments.""" + m = re.match(r"^folders/(?P<folder>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_organization_path(organization: str, ) -> str: + """Returns a fully-qualified organization string.""" + return "organizations/{organization}".format(organization=organization, ) + + @staticmethod + def parse_common_organization_path(path: str) -> Dict[str,str]: + """Parse an organization path into its component segments.""" + m = re.match(r"^organizations/(?P<organization>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_project_path(project: str, ) -> str: + """Returns a fully-qualified project string.""" + return "projects/{project}".format(project=project, ) + + @staticmethod + def parse_common_project_path(path: str) -> Dict[str,str]: + """Parse a project path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)$", path) + return m.groupdict() if m else {} + + @staticmethod + def common_location_path(project: str, location: str, ) -> str: + """Returns a fully-qualified location string.""" + return "projects/{project}/locations/{location}".format(project=project, location=location, ) + + @staticmethod + def parse_common_location_path(path: str) -> Dict[str,str]: + """Parse a location path into its component segments.""" + m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path) + return m.groupdict() if m else {} + + @classmethod + def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): + """Return the API endpoint and client cert source for mutual TLS.
+ + The client cert source is determined in the following order: + (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the + client cert source is None. + (2) if `client_options.client_cert_source` is provided, use the provided one; if the + default client cert source exists, use the default one; otherwise the client cert + source is None. + + The API endpoint is determined in the following order: + (1) if `client_options.api_endpoint` is provided, use the provided one. + (2) if `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the + default mTLS endpoint; if the environment variable is "never", use the default API + endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise + use the default API endpoint. + + More details can be found at https://google.aip.dev/auth/4114. + + Args: + client_options (google.api_core.client_options.ClientOptions): Custom options for the + client. Only the `api_endpoint` and `client_cert_source` properties may be used + in this method. + + Returns: + Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the + client cert source to use. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If any errors happen. + """ + if client_options is None: + client_options = client_options_lib.ClientOptions() + use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") + use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") + if use_client_cert not in ("true", "false"): + raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") + if use_mtls_endpoint not in ("auto", "never", "always"): + raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") + + # Figure out the client cert source to use. + client_cert_source = None + if use_client_cert == "true": + if client_options.client_cert_source: + client_cert_source = client_options.client_cert_source + elif mtls.has_default_client_cert_source(): + client_cert_source = mtls.default_client_cert_source() + + # Figure out which api endpoint to use. + if client_options.api_endpoint is not None: + api_endpoint = client_options.api_endpoint + elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): + api_endpoint = cls.DEFAULT_MTLS_ENDPOINT + else: + api_endpoint = cls.DEFAULT_ENDPOINT + + return api_endpoint, client_cert_source + + def __init__(self, *, + credentials: Optional[ga_credentials.Credentials] = None, + transport: Optional[Union[str, TranscoderServiceTransport]] = None, + client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + ) -> None: + """Instantiates the transcoder service client. + + Args: + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + transport (Union[str, TranscoderServiceTransport]): The + transport to use. If set to None, a transport is chosen + automatically. + client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the + client. It won't take effect if a ``transport`` instance is provided.
+ (1) The ``api_endpoint`` property can be used to override the + default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT + environment variable can also be used to override the endpoint: + "always" (always use the default mTLS endpoint), "never" (always + use the default regular endpoint) and "auto" (auto switch to the + default mTLS endpoint if client certificate is present, this is + the default value). However, the ``api_endpoint`` property takes + precedence if provided. + (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable + is "true", then the ``client_cert_source`` property can be used + to provide client certificate for mutual TLS transport. If + not provided, the default SSL client certificate will be used if + present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not + set, no client certificate will be used. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + """ + if isinstance(client_options, dict): + client_options = client_options_lib.from_dict(client_options) + if client_options is None: + client_options = client_options_lib.ClientOptions() + client_options = cast(client_options_lib.ClientOptions, client_options) + + api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) + + api_key_value = getattr(client_options, "api_key", None) + if api_key_value and credentials: + raise ValueError("client_options.api_key and credentials are mutually exclusive") + + # Save or instantiate the transport. + # Ordinarily, we provide the transport, but allowing a custom transport + # instance provides an extensibility point for unusual situations. + if isinstance(transport, TranscoderServiceTransport): + # transport is a TranscoderServiceTransport instance. + if credentials or client_options.credentials_file or api_key_value: + raise ValueError("When providing a transport instance, " + "provide its credentials directly.") + if client_options.scopes: + raise ValueError( + "When providing a transport instance, provide its scopes " + "directly." + ) + self._transport = transport + else: + import google.auth._default # type: ignore + + if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): + credentials = google.auth._default.get_api_key_credentials(api_key_value) + + Transport = type(self).get_transport_class(transport) + self._transport = Transport( + credentials=credentials, + credentials_file=client_options.credentials_file, + host=api_endpoint, + scopes=client_options.scopes, + client_cert_source_for_mtls=client_cert_source_func, + quota_project_id=client_options.quota_project_id, + client_info=client_info, + always_use_jwt_access=True, + api_audience=client_options.api_audience, + ) + + def create_job(self, + request: Optional[Union[services.CreateJobRequest, dict]] = None, + *, + parent: Optional[str] = None, + job: Optional[resources.Job] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Creates a job in the specified region. + + .. 
code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]): + The request object. Request message for + `TranscoderService.CreateJob`. + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job (google.cloud.video.transcoder_v1.types.Job): + Required. Parameters for creating + transcoding job. + + This corresponds to the ``job`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.CreateJobRequest): + request = services.CreateJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job is not None: + request.job = job + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. 
+ return response + + def list_jobs(self, + request: Optional[Union[services.ListJobsRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobsPager: + r"""Lists jobs in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]): + The request object. Request message for + `TranscoderService.ListJobs`. The parent location from + which to retrieve the collection of jobs. + parent (str): + Required. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager: + Response message for TranscoderService.ListJobs. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobsRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobsRequest): + request = services.ListJobsRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_jobs] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobsPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job(self, + request: Optional[Union[services.GetJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.Job: + r"""Returns the job data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]): + The request object. Request message for + `TranscoderService.GetJob`. + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.Job: + Transcoding job resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.GetJobRequest): + request = services.GetJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job(self, + request: Optional[Union[services.DeleteJobRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]): + The request object. Request message for + `TranscoderService.DeleteJob`. + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobRequest): + request = services.DeleteJobRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. 
+ rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def create_job_template(self, + request: Optional[Union[services.CreateJobTemplateRequest, dict]] = None, + *, + parent: Optional[str] = None, + job_template: Optional[resources.JobTemplate] = None, + job_template_id: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Creates a job template in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = client.create_job_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]): + The request object. Request message for + `TranscoderService.CreateJobTemplate`. + parent (str): + Required. The parent location to create this job + template. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template (google.cloud.video.transcoder_v1.types.JobTemplate): + Required. Parameters for creating job + template. + + This corresponds to the ``job_template`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + job_template_id (str): + Required. The ID to use for the job template, which will + become the final component of the job template's + resource name. + + This value should be 4-63 characters, and valid + characters must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + + This corresponds to the ``job_template_id`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([parent, job_template, job_template_id]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.CreateJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. 
+ if not isinstance(request, services.CreateJobTemplateRequest): + request = services.CreateJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + if job_template is not None: + request.job_template = job_template + if job_template_id is not None: + request.job_template_id = job_template_id + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.create_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def list_job_templates(self, + request: Optional[Union[services.ListJobTemplatesRequest, dict]] = None, + *, + parent: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> pagers.ListJobTemplatesPager: + r"""Lists job templates in the specified region. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]): + The request object. Request message for + `TranscoderService.ListJobTemplates`. + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + + This corresponds to the ``parent`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager: + Response message for TranscoderService.ListJobTemplates. + + Iterating over this object will yield results and + resolve additional pages automatically. + + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([parent]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.ListJobTemplatesRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.ListJobTemplatesRequest): + request = services.ListJobTemplatesRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if parent is not None: + request.parent = parent + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.list_job_templates] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("parent", request.parent), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # This method is paged; wrap the response in a pager, which provides + # an `__iter__` convenience method. + response = pagers.ListJobTemplatesPager( + method=rpc, + request=request, + response=response, + metadata=metadata, + ) + + # Done; return the response. + return response + + def get_job_template(self, + request: Optional[Union[services.GetJobTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> resources.JobTemplate: + r"""Returns the job template data. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_template(request=request) + + # Handle the response + print(response) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]): + The request object. Request message for + `TranscoderService.GetJobTemplate`. + name (str): + Required. The name of the job template to retrieve. + Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + + Returns: + google.cloud.video.transcoder_v1.types.JobTemplate: + Transcoding job template resource. + """ + # Create or coerce a protobuf request object. 
+ # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. + has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.GetJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.GetJobTemplateRequest): + request = services.GetJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.get_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + # Done; return the response. + return response + + def delete_job_template(self, + request: Optional[Union[services.DeleteJobTemplateRequest, dict]] = None, + *, + name: Optional[str] = None, + retry: OptionalRetry = gapic_v1.method.DEFAULT, + timeout: Union[float, object] = gapic_v1.method.DEFAULT, + metadata: Sequence[Tuple[str, str]] = (), + ) -> None: + r"""Deletes a job template. + + .. code-block:: python + + # This snippet has been automatically generated and should be regarded as a + # code template only. + # It will require modifications to work: + # - It may require correct/in-range values for request initialization. + # - It may require specifying regional endpoints when creating the service + # client as shown in: + # https://googleapis.dev/python/google-api-core/latest/client_options.html + from google.cloud.video import transcoder_v1 + + def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_job_template(request=request) + + Args: + request (Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]): + The request object. Request message for + `TranscoderService.DeleteJobTemplate`. + name (str): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + + This corresponds to the ``name`` field + on the ``request`` instance; if ``request`` is provided, this + should not be set. + retry (google.api_core.retry.Retry): Designation of what errors, if any, + should be retried. + timeout (float): The timeout for this request. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + # Create or coerce a protobuf request object. + # Quick check: If we got a request object, we should *not* have + # gotten any keyword arguments that map to the request. 
+ has_flattened_params = any([name]) + if request is not None and has_flattened_params: + raise ValueError('If the `request` argument is set, then none of ' + 'the individual field arguments should be set.') + + # Minor optimization to avoid making a copy if the user passes + # in a services.DeleteJobTemplateRequest. + # There's no risk of modifying the input as we've already verified + # there are no flattened fields. + if not isinstance(request, services.DeleteJobTemplateRequest): + request = services.DeleteJobTemplateRequest(request) + # If we have keyword arguments corresponding to fields on the + # request, apply these. + if name is not None: + request.name = name + + # Wrap the RPC method; this adds retry and timeout information, + # and friendly error handling. + rpc = self._transport._wrapped_methods[self._transport.delete_job_template] + + # Certain fields should be provided within the metadata header; + # add these here. + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ("name", request.name), + )), + ) + + # Send the request. + rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + """Releases underlying transport's resources. + + .. warning:: + ONLY use as a context manager if the transport is NOT shared + with other clients! Exiting the with block will CLOSE the transport + and may cause errors in other clients! + """ + self.transport.close() + + + + + + + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +__all__ = ( + "TranscoderServiceClient", +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py new file mode 100644 index 0000000..180638c --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py @@ -0,0 +1,261 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services + + +class ListJobsPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and + provides an ``__iter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` + attributes are available on the pager. 
If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobsResponse], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.Job]: + for page in self.pages: + yield from page.jobs + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobsAsyncPager: + """A pager for iterating through ``list_jobs`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``jobs`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobs`` requests and continue to iterate + through the ``jobs`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobsResponse]], + request: services.ListJobsRequest, + response: services.ListJobsResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobsRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobsResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. 
+ """ + self._method = method + self._request = services.ListJobsRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[services.ListJobsResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.Job]: + async def async_generator(): + async for page in self.pages: + for response in page.jobs: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and + provides an ``__iter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__iter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. + + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., services.ListJobTemplatesResponse], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiate the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + def pages(self) -> Iterator[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = self._method(self._request, metadata=self._metadata) + yield self._response + + def __iter__(self) -> Iterator[resources.JobTemplate]: + for page in self.pages: + yield from page.job_templates + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) + + +class ListJobTemplatesAsyncPager: + """A pager for iterating through ``list_job_templates`` requests. + + This class thinly wraps an initial + :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and + provides an ``__aiter__`` method to iterate through its + ``job_templates`` field. + + If there are more pages, the ``__aiter__`` method will make additional + ``ListJobTemplates`` requests and continue to iterate + through the ``job_templates`` field on the + corresponding responses. 
+ + All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` + attributes are available on the pager. If multiple requests are made, only + the most recent response is retained, and thus used for attribute lookup. + """ + def __init__(self, + method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], + request: services.ListJobTemplatesRequest, + response: services.ListJobTemplatesResponse, + *, + metadata: Sequence[Tuple[str, str]] = ()): + """Instantiates the pager. + + Args: + method (Callable): The method that was originally called, and + which instantiated this pager. + request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): + The initial request object. + response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): + The initial response object. + metadata (Sequence[Tuple[str, str]]): Strings which should be + sent along with the request as metadata. + """ + self._method = method + self._request = services.ListJobTemplatesRequest(request) + self._response = response + self._metadata = metadata + + def __getattr__(self, name: str) -> Any: + return getattr(self._response, name) + + @property + async def pages(self) -> AsyncIterator[services.ListJobTemplatesResponse]: + yield self._response + while self._response.next_page_token: + self._request.page_token = self._response.next_page_token + self._response = await self._method(self._request, metadata=self._metadata) + yield self._response + def __aiter__(self) -> AsyncIterator[resources.JobTemplate]: + async def async_generator(): + async for page in self.pages: + for response in page.job_templates: + yield response + + return async_generator() + + def __repr__(self) -> str: + return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py new file mode 100644 index 0000000..bdb6a47 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from collections import OrderedDict +from typing import Dict, Type + +from .base import TranscoderServiceTransport +from .grpc import TranscoderServiceGrpcTransport +from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + + +# Compile a registry of transports. 
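+# NOTE (editorial, illustrative; not emitted by the generator): this registry
+# maps the transport names accepted by the clients to their implementing
+# classes, so a transport can be selected by string, for example:
+#
+#     from google.cloud.video import transcoder_v1
+#     client = transcoder_v1.TranscoderServiceClient(transport="grpc")
+#
+# When no transport is given, the synchronous client defaults to the first
+# entry registered here ("grpc").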
+_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] +_transport_registry['grpc'] = TranscoderServiceGrpcTransport +_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport + +__all__ = ( + 'TranscoderServiceTransport', + 'TranscoderServiceGrpcTransport', + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py new file mode 100644 index 0000000..31f0fbb --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import abc +from typing import Awaitable, Callable, Dict, Optional, Sequence, Union + +from google.cloud.video.transcoder_v1 import gapic_version as package_version + +import google.auth # type: ignore +import google.api_core +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import retry as retries +from google.auth import credentials as ga_credentials # type: ignore +from google.oauth2 import service_account # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore + +DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) + + +class TranscoderServiceTransport(abc.ABC): + """Abstract transport class for TranscoderService.""" + + AUTH_SCOPES = ( + 'https://www.googleapis.com/auth/cloud-platform', + ) + + DEFAULT_HOST: str = 'transcoder.googleapis.com' + def __init__( + self, *, + host: str = DEFAULT_HOST, + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + **kwargs, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A list of scopes. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. 
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + """ + + scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} + + # Save the scopes. + self._scopes = scopes + + # If no credentials are provided, then determine the appropriate + # defaults. + if credentials and credentials_file: + raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") + + if credentials_file is not None: + credentials, _ = google.auth.load_credentials_from_file( + credentials_file, + **scopes_kwargs, + quota_project_id=quota_project_id + ) + elif credentials is None: + credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) + # Don't apply audience if the credentials file passed from user. + if hasattr(credentials, "with_gdch_audience"): + credentials = credentials.with_gdch_audience(api_audience if api_audience else host) + + # If the credentials are service account credentials, then always try to use self signed JWT. + if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): + credentials = credentials.with_always_use_jwt_access(True) + + # Save the credentials. + self._credentials = credentials + + # Save the hostname. Default to port 443 (HTTPS) if none is specified. + if ':' not in host: + host += ':443' + self._host = host + + def _prep_wrapped_messages(self, client_info): + # Precompute the wrapped methods. + self._wrapped_methods = { + self.create_job: gapic_v1.method.wrap_method( + self.create_job, + default_timeout=60.0, + client_info=client_info, + ), + self.list_jobs: gapic_v1.method.wrap_method( + self.list_jobs, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job: gapic_v1.method.wrap_method( + self.get_job, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job: gapic_v1.method.wrap_method( + self.delete_job, + default_timeout=60.0, + client_info=client_info, + ), + self.create_job_template: gapic_v1.method.wrap_method( + self.create_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.list_job_templates: gapic_v1.method.wrap_method( + self.list_job_templates, + default_timeout=60.0, + client_info=client_info, + ), + self.get_job_template: gapic_v1.method.wrap_method( + self.get_job_template, + default_timeout=60.0, + client_info=client_info, + ), + self.delete_job_template: gapic_v1.method.wrap_method( + self.delete_job_template, + default_timeout=60.0, + client_info=client_info, + ), + } + + def close(self): + """Closes resources associated with the transport. + + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
+ """ + raise NotImplementedError() + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Union[ + services.ListJobsResponse, + Awaitable[services.ListJobsResponse] + ]]: + raise NotImplementedError() + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Union[ + resources.Job, + Awaitable[resources.Job] + ]]: + raise NotImplementedError() + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Union[ + services.ListJobTemplatesResponse, + Awaitable[services.ListJobTemplatesResponse] + ]]: + raise NotImplementedError() + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Union[ + resources.JobTemplate, + Awaitable[resources.JobTemplate] + ]]: + raise NotImplementedError() + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Union[ + empty_pb2.Empty, + Awaitable[empty_pb2.Empty] + ]]: + raise NotImplementedError() + + @property + def kind(self) -> str: + raise NotImplementedError() + + +__all__ = ( + 'TranscoderServiceTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py new file mode 100644 index 0000000..28376c5 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py @@ -0,0 +1,454 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import grpc_helpers +from google.api_core import gapic_v1 +import google.auth # type: ignore +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO + + +class TranscoderServiceGrpcTransport(TranscoderServiceTransport): + """gRPC backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. 
Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). + You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + _stubs: Dict[str, Callable] + + def __init__(self, *, + host: str = 'transcoder.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[grpc.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional(Sequence[str])): A list of scopes. This argument is + ignored if ``channel`` is provided. + channel (Optional[grpc.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. 
+ always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. + if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @classmethod + def create_channel(cls, + host: str = 'transcoder.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> grpc.Channel: + """Create and return a gRPC channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is mutually exclusive with credentials. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. 
+ quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + grpc.Channel: A gRPC channel object. + + Raises: + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + + return grpc_helpers.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + @property + def grpc_channel(self) -> grpc.Channel: + """Return the channel designed to connect to this service. + """ + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + resources.Job]: + r"""Return a callable for the create job method over gRPC. + + Creates a job in the specified region. + + Returns: + Callable[[~.CreateJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + services.ListJobsResponse]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + ~.ListJobsResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + resources.Job]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + ~.Job]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. 
+ + Returns: + Callable[[~.DeleteJobRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + services.ListJobTemplatesResponse]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + ~.ListJobTemplatesResponse]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + resources.JobTemplate]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + ~.JobTemplate]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
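+ # NOTE (editorial, illustrative; not emitted by the generator): the stub is
+ # created lazily on first access and cached in ``self._stubs``, so repeated
+ # reads of this property reuse the same stub bound to ``self.grpc_channel``.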
+ if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + empty_pb2.Empty]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + ~.Empty]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + def close(self): + self.grpc_channel.close() + + @property + def kind(self) -> str: + return "grpc" + + +__all__ = ( + 'TranscoderServiceGrpcTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py new file mode 100644 index 0000000..4cc0b8e --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py @@ -0,0 +1,453 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import warnings +from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union + +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers_async +from google.auth import credentials as ga_credentials # type: ignore +from google.auth.transport.grpc import SslCredentials # type: ignore + +import grpc # type: ignore +from grpc.experimental import aio # type: ignore + +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.protobuf import empty_pb2 # type: ignore +from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from .grpc import TranscoderServiceGrpcTransport + + +class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): + """gRPC AsyncIO backend transport for TranscoderService. + + Using the Transcoder API, you can queue asynchronous jobs for + transcoding media into various output formats. Output formats + may include different streaming standards such as HTTP Live + Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). 
+ You can also customize jobs using advanced features such as + Digital Rights Management (DRM), audio equalization, content + concatenation, and digital ad-stitch ready content generation. + + This class defines the same methods as the primary client, so the + primary client can load the underlying transport implementation + and call it. + + It sends protocol buffers over the wire using gRPC (which is built on + top of HTTP/2); the ``grpcio`` package must be installed. + """ + + _grpc_channel: aio.Channel + _stubs: Dict[str, Callable] = {} + + @classmethod + def create_channel(cls, + host: str = 'transcoder.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + quota_project_id: Optional[str] = None, + **kwargs) -> aio.Channel: + """Create and return a gRPC AsyncIO channel object. + Args: + host (Optional[str]): The host for the channel to use. + credentials (Optional[~.Credentials]): The + authorization credentials to attach to requests. These + credentials identify this application to the service. If + none are specified, the client will attempt to ascertain + the credentials from the environment. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + kwargs (Optional[dict]): Keyword arguments, which are passed to the + channel creation. + Returns: + aio.Channel: A gRPC AsyncIO channel object. + """ + + return grpc_helpers_async.create_channel( + host, + credentials=credentials, + credentials_file=credentials_file, + quota_project_id=quota_project_id, + default_scopes=cls.AUTH_SCOPES, + scopes=scopes, + default_host=cls.DEFAULT_HOST, + **kwargs + ) + + def __init__(self, *, + host: str = 'transcoder.googleapis.com', + credentials: Optional[ga_credentials.Credentials] = None, + credentials_file: Optional[str] = None, + scopes: Optional[Sequence[str]] = None, + channel: Optional[aio.Channel] = None, + api_mtls_endpoint: Optional[str] = None, + client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, + client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, + quota_project_id: Optional[str] = None, + client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, + always_use_jwt_access: Optional[bool] = False, + api_audience: Optional[str] = None, + ) -> None: + """Instantiate the transport. + + Args: + host (Optional[str]): + The hostname to connect to. + credentials (Optional[google.auth.credentials.Credentials]): The + authorization credentials to attach to requests. These + credentials identify the application to the service; if none + are specified, the client will attempt to ascertain the + credentials from the environment. + This argument is ignored if ``channel`` is provided. + credentials_file (Optional[str]): A file with credentials that can + be loaded with :func:`google.auth.load_credentials_from_file`. + This argument is ignored if ``channel`` is provided. + scopes (Optional[Sequence[str]]): A optional list of scopes needed for this + service. 
These are only used when credentials are not specified and + are passed to :func:`google.auth.default`. + channel (Optional[aio.Channel]): A ``Channel`` instance through + which to make calls. + api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. + If provided, it overrides the ``host`` argument and tries to create + a mutual TLS channel with client SSL credentials from + ``client_cert_source`` or application default SSL credentials. + client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): + Deprecated. A callback to provide client SSL certificate bytes and + private key bytes, both in PEM format. It is ignored if + ``api_mtls_endpoint`` is None. + ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials + for the grpc channel. It is ignored if ``channel`` is provided. + client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): + A callback to provide client certificate bytes and private key bytes, + both in PEM format. It is used to configure a mutual TLS channel. It is + ignored if ``channel`` or ``ssl_channel_credentials`` is provided. + quota_project_id (Optional[str]): An optional project to use for billing + and quota. + client_info (google.api_core.gapic_v1.client_info.ClientInfo): + The client info used to send a user-agent string along with + API requests. If ``None``, then default info will be used. + Generally, you only need to set this if you're developing + your own client library. + always_use_jwt_access (Optional[bool]): Whether self signed JWT should + be used for service account credentials. + + Raises: + google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport + creation failed for any reason. + google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` + and ``credentials_file`` are passed. + """ + self._grpc_channel = None + self._ssl_channel_credentials = ssl_channel_credentials + self._stubs: Dict[str, Callable] = {} + + if api_mtls_endpoint: + warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) + if client_cert_source: + warnings.warn("client_cert_source is deprecated", DeprecationWarning) + + if channel: + # Ignore credentials if a channel was passed. + credentials = False + # If a channel was explicitly provided, set it. + self._grpc_channel = channel + self._ssl_channel_credentials = None + else: + if api_mtls_endpoint: + host = api_mtls_endpoint + + # Create SSL credentials with client_cert_source or application + # default SSL credentials. 
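+ # NOTE (editorial, illustrative; not emitted by the generator): on this
+ # deprecated ``api_mtls_endpoint`` path, ``client_cert_source`` takes
+ # precedence and application default SSL credentials are the fallback;
+ # outside this path, ``client_cert_source_for_mtls`` is consulted only
+ # when ``ssl_channel_credentials`` was not supplied.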
+ if client_cert_source: + cert, key = client_cert_source() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + else: + self._ssl_channel_credentials = SslCredentials().ssl_credentials + + else: + if client_cert_source_for_mtls and not ssl_channel_credentials: + cert, key = client_cert_source_for_mtls() + self._ssl_channel_credentials = grpc.ssl_channel_credentials( + certificate_chain=cert, private_key=key + ) + + # The base transport sets the host, credentials and scopes + super().__init__( + host=host, + credentials=credentials, + credentials_file=credentials_file, + scopes=scopes, + quota_project_id=quota_project_id, + client_info=client_info, + always_use_jwt_access=always_use_jwt_access, + api_audience=api_audience, + ) + + if not self._grpc_channel: + self._grpc_channel = type(self).create_channel( + self._host, + # use the credentials which are saved + credentials=self._credentials, + # Set ``credentials_file`` to ``None`` here as + # the credentials that we saved earlier should be used. + credentials_file=None, + scopes=self._scopes, + ssl_credentials=self._ssl_channel_credentials, + quota_project_id=quota_project_id, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Wrap messages. This must be done after self._grpc_channel exists + self._prep_wrapped_messages(client_info) + + @property + def grpc_channel(self) -> aio.Channel: + """Create the channel designed to connect to this service. + + This property caches on the instance; repeated calls return + the same channel. + """ + # Return the channel from cache. + return self._grpc_channel + + @property + def create_job(self) -> Callable[ + [services.CreateJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the create job method over gRPC. + + Creates a job in the specified region. + + Returns: + Callable[[~.CreateJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job' not in self._stubs: + self._stubs['create_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', + request_serializer=services.CreateJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['create_job'] + + @property + def list_jobs(self) -> Callable[ + [services.ListJobsRequest], + Awaitable[services.ListJobsResponse]]: + r"""Return a callable for the list jobs method over gRPC. + + Lists jobs in the specified region. + + Returns: + Callable[[~.ListJobsRequest], + Awaitable[~.ListJobsResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. 
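+ # NOTE (editorial, illustrative; not emitted by the generator): unlike the
+ # synchronous transport, invoking this stub returns an awaitable, so the
+ # result must be awaited by the caller.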
+ if 'list_jobs' not in self._stubs: + self._stubs['list_jobs'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', + request_serializer=services.ListJobsRequest.serialize, + response_deserializer=services.ListJobsResponse.deserialize, + ) + return self._stubs['list_jobs'] + + @property + def get_job(self) -> Callable[ + [services.GetJobRequest], + Awaitable[resources.Job]]: + r"""Return a callable for the get job method over gRPC. + + Returns the job data. + + Returns: + Callable[[~.GetJobRequest], + Awaitable[~.Job]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job' not in self._stubs: + self._stubs['get_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', + request_serializer=services.GetJobRequest.serialize, + response_deserializer=resources.Job.deserialize, + ) + return self._stubs['get_job'] + + @property + def delete_job(self) -> Callable[ + [services.DeleteJobRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job method over gRPC. + + Deletes a job. + + Returns: + Callable[[~.DeleteJobRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job' not in self._stubs: + self._stubs['delete_job'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', + request_serializer=services.DeleteJobRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job'] + + @property + def create_job_template(self) -> Callable[ + [services.CreateJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the create job template method over gRPC. + + Creates a job template in the specified region. + + Returns: + Callable[[~.CreateJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'create_job_template' not in self._stubs: + self._stubs['create_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', + request_serializer=services.CreateJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['create_job_template'] + + @property + def list_job_templates(self) -> Callable[ + [services.ListJobTemplatesRequest], + Awaitable[services.ListJobTemplatesResponse]]: + r"""Return a callable for the list job templates method over gRPC. + + Lists job templates in the specified region. + + Returns: + Callable[[~.ListJobTemplatesRequest], + Awaitable[~.ListJobTemplatesResponse]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. 
+ # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'list_job_templates' not in self._stubs: + self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', + request_serializer=services.ListJobTemplatesRequest.serialize, + response_deserializer=services.ListJobTemplatesResponse.deserialize, + ) + return self._stubs['list_job_templates'] + + @property + def get_job_template(self) -> Callable[ + [services.GetJobTemplateRequest], + Awaitable[resources.JobTemplate]]: + r"""Return a callable for the get job template method over gRPC. + + Returns the job template data. + + Returns: + Callable[[~.GetJobTemplateRequest], + Awaitable[~.JobTemplate]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'get_job_template' not in self._stubs: + self._stubs['get_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', + request_serializer=services.GetJobTemplateRequest.serialize, + response_deserializer=resources.JobTemplate.deserialize, + ) + return self._stubs['get_job_template'] + + @property + def delete_job_template(self) -> Callable[ + [services.DeleteJobTemplateRequest], + Awaitable[empty_pb2.Empty]]: + r"""Return a callable for the delete job template method over gRPC. + + Deletes a job template. + + Returns: + Callable[[~.DeleteJobTemplateRequest], + Awaitable[~.Empty]]: + A function that, when called, will call the underlying RPC + on the server. + """ + # Generate a "stub function" on-the-fly which will actually make + # the request. + # gRPC handles serialization and deserialization, so we just need + # to pass in the functions for each. + if 'delete_job_template' not in self._stubs: + self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( + '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', + request_serializer=services.DeleteJobTemplateRequest.serialize, + response_deserializer=empty_pb2.Empty.FromString, + ) + return self._stubs['delete_job_template'] + + def close(self): + return self.grpc_channel.close() + + +__all__ = ( + 'TranscoderServiceGrpcAsyncIOTransport', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py new file mode 100644 index 0000000..ee09558 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py @@ -0,0 +1,78 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +from .resources import ( + AdBreak, + AudioStream, + EditAtom, + ElementaryStream, + Input, + Job, + JobConfig, + JobTemplate, + Manifest, + MuxStream, + Output, + Overlay, + PreprocessingConfig, + PubsubDestination, + SegmentSettings, + SpriteSheet, + TextStream, + VideoStream, +) +from .services import ( + CreateJobRequest, + CreateJobTemplateRequest, + DeleteJobRequest, + DeleteJobTemplateRequest, + GetJobRequest, + GetJobTemplateRequest, + ListJobsRequest, + ListJobsResponse, + ListJobTemplatesRequest, + ListJobTemplatesResponse, +) + +__all__ = ( + 'AdBreak', + 'AudioStream', + 'EditAtom', + 'ElementaryStream', + 'Input', + 'Job', + 'JobConfig', + 'JobTemplate', + 'Manifest', + 'MuxStream', + 'Output', + 'Overlay', + 'PreprocessingConfig', + 'PubsubDestination', + 'SegmentSettings', + 'SpriteSheet', + 'TextStream', + 'VideoStream', + 'CreateJobRequest', + 'CreateJobTemplateRequest', + 'DeleteJobRequest', + 'DeleteJobTemplateRequest', + 'GetJobRequest', + 'GetJobTemplateRequest', + 'ListJobsRequest', + 'ListJobsResponse', + 'ListJobTemplatesRequest', + 'ListJobTemplatesResponse', +) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py new file mode 100644 index 0000000..3dab7f2 --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py @@ -0,0 +1,2172 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1', + manifest={ + 'Job', + 'JobTemplate', + 'JobConfig', + 'Input', + 'Output', + 'EditAtom', + 'AdBreak', + 'ElementaryStream', + 'MuxStream', + 'Manifest', + 'PubsubDestination', + 'SpriteSheet', + 'Overlay', + 'PreprocessingConfig', + 'VideoStream', + 'AudioStream', + 'TextStream', + 'SegmentSettings', + }, +) + + +class Job(proto.Message): + r"""Transcoding job resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + name (str): + The resource name of the job. Format: + ``projects/{project_number}/locations/{location}/jobs/{job}`` + input_uri (str): + Input only. Specify the ``input_uri`` to populate empty + ``uri`` fields in each element of ``Job.config.inputs`` or + ``JobTemplate.config.inputs`` when using template. URI of + the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). 
See `Supported input and + output + formats `__. + output_uri (str): + Input only. Specify the ``output_uri`` to populate an empty + ``Job.config.output.uri`` or + ``JobTemplate.config.output.uri`` when using template. URI + for the output file(s). For example, + ``gs://my-bucket/outputs/``. See `Supported input and output + formats `__. + template_id (str): + Input only. Specify the ``template_id`` to use for + populating ``Job.config``. The default is ``preset/web-hd``. + + Preset Transcoder templates: + + - ``preset/{preset_id}`` + + - User defined JobTemplate: ``{job_template_id}`` + + This field is a member of `oneof`_ ``job_config``. + config (google.cloud.video.transcoder_v1.types.JobConfig): + The configuration for this job. + + This field is a member of `oneof`_ ``job_config``. + state (google.cloud.video.transcoder_v1.types.Job.ProcessingState): + Output only. The current state of the job. + create_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the job was created. + start_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + started. + end_time (google.protobuf.timestamp_pb2.Timestamp): + Output only. The time the transcoding + finished. + ttl_after_completion_days (int): + Job time to live value in days, which will be + effective after job completion. Job should be + deleted automatically after the given TTL. Enter + a value between 1 and 90. The default is 30. + labels (MutableMapping[str, str]): + The labels associated with this job. You can + use these to organize and group your jobs. + error (google.rpc.status_pb2.Status): + Output only. An error object that describes the reason for + the failure. This property is always present when ``state`` + is ``FAILED``. + """ + class ProcessingState(proto.Enum): + r"""The current state of the job.""" + PROCESSING_STATE_UNSPECIFIED = 0 + PENDING = 1 + RUNNING = 2 + SUCCEEDED = 3 + FAILED = 4 + + name: str = proto.Field( + proto.STRING, + number=1, + ) + input_uri: str = proto.Field( + proto.STRING, + number=2, + ) + output_uri: str = proto.Field( + proto.STRING, + number=3, + ) + template_id: str = proto.Field( + proto.STRING, + number=4, + oneof='job_config', + ) + config: 'JobConfig' = proto.Field( + proto.MESSAGE, + number=5, + oneof='job_config', + message='JobConfig', + ) + state: ProcessingState = proto.Field( + proto.ENUM, + number=8, + enum=ProcessingState, + ) + create_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + end_time: timestamp_pb2.Timestamp = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + ttl_after_completion_days: int = proto.Field( + proto.INT32, + number=15, + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=16, + ) + error: status_pb2.Status = proto.Field( + proto.MESSAGE, + number=17, + message=status_pb2.Status, + ) + + +class JobTemplate(proto.Message): + r"""Transcoding job template resource. + + Attributes: + name (str): + The resource name of the job template. Format: + ``projects/{project_number}/locations/{location}/jobTemplates/{job_template}`` + config (google.cloud.video.transcoder_v1.types.JobConfig): + The configuration for this template. + labels (MutableMapping[str, str]): + The labels associated with this job template. 
+ You can use these to organize and group your job + templates. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + config: 'JobConfig' = proto.Field( + proto.MESSAGE, + number=2, + message='JobConfig', + ) + labels: MutableMapping[str, str] = proto.MapField( + proto.STRING, + proto.STRING, + number=3, + ) + + +class JobConfig(proto.Message): + r"""Job configuration + + Attributes: + inputs (MutableSequence[google.cloud.video.transcoder_v1.types.Input]): + List of input assets stored in Cloud Storage. + edit_list (MutableSequence[google.cloud.video.transcoder_v1.types.EditAtom]): + List of ``Edit atom``\ s. Defines the ultimate timeline of + the resulting file or manifest. + elementary_streams (MutableSequence[google.cloud.video.transcoder_v1.types.ElementaryStream]): + List of elementary streams. + mux_streams (MutableSequence[google.cloud.video.transcoder_v1.types.MuxStream]): + List of multiplexing settings for output + streams. + manifests (MutableSequence[google.cloud.video.transcoder_v1.types.Manifest]): + List of output manifests. + output (google.cloud.video.transcoder_v1.types.Output): + Output configuration. + ad_breaks (MutableSequence[google.cloud.video.transcoder_v1.types.AdBreak]): + List of ad breaks. Specifies where to insert + ad break tags in the output manifests. + pubsub_destination (google.cloud.video.transcoder_v1.types.PubsubDestination): + Destination on Pub/Sub. + sprite_sheets (MutableSequence[google.cloud.video.transcoder_v1.types.SpriteSheet]): + List of output sprite sheets. + Spritesheets require at least one VideoStream in + the Jobconfig. + overlays (MutableSequence[google.cloud.video.transcoder_v1.types.Overlay]): + List of overlays on the output video, in + descending Z-order. + """ + + inputs: MutableSequence['Input'] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message='Input', + ) + edit_list: MutableSequence['EditAtom'] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message='EditAtom', + ) + elementary_streams: MutableSequence['ElementaryStream'] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message='ElementaryStream', + ) + mux_streams: MutableSequence['MuxStream'] = proto.RepeatedField( + proto.MESSAGE, + number=4, + message='MuxStream', + ) + manifests: MutableSequence['Manifest'] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message='Manifest', + ) + output: 'Output' = proto.Field( + proto.MESSAGE, + number=6, + message='Output', + ) + ad_breaks: MutableSequence['AdBreak'] = proto.RepeatedField( + proto.MESSAGE, + number=7, + message='AdBreak', + ) + pubsub_destination: 'PubsubDestination' = proto.Field( + proto.MESSAGE, + number=8, + message='PubsubDestination', + ) + sprite_sheets: MutableSequence['SpriteSheet'] = proto.RepeatedField( + proto.MESSAGE, + number=9, + message='SpriteSheet', + ) + overlays: MutableSequence['Overlay'] = proto.RepeatedField( + proto.MESSAGE, + number=10, + message='Overlay', + ) + + +class Input(proto.Message): + r"""Input asset. + + Attributes: + key (str): + A unique key for this input. Must be + specified when using advanced mapping and edit + lists. + uri (str): + URI of the media. Input files must be at least 5 seconds in + duration and stored in Cloud Storage (for example, + ``gs://bucket/inputs/file.mp4``). If empty, the value is + populated from ``Job.input_uri``. See `Supported input and + output + formats `__. + preprocessing_config (google.cloud.video.transcoder_v1.types.PreprocessingConfig): + Preprocessing configurations. 
+ """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + uri: str = proto.Field( + proto.STRING, + number=2, + ) + preprocessing_config: 'PreprocessingConfig' = proto.Field( + proto.MESSAGE, + number=3, + message='PreprocessingConfig', + ) + + +class Output(proto.Message): + r"""Location of output file(s) in a Cloud Storage bucket. + + Attributes: + uri (str): + URI for the output file(s). For example, + ``gs://my-bucket/outputs/``. If empty, the value is + populated from ``Job.output_uri``. See `Supported input and + output + formats `__. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + + +class EditAtom(proto.Message): + r"""Edit atom. + + Attributes: + key (str): + A unique key for this atom. Must be specified + when using advanced mapping. + inputs (MutableSequence[str]): + List of ``Input.key``\ s identifying files that should be + used in this atom. The listed ``inputs`` must have the same + timeline. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds for the atom, relative to the input file + timeline. When ``end_time_offset`` is not specified, the + ``inputs`` are used until the end of the atom. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the atom, relative to the input + file timeline. The default is ``0s``. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + inputs: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=2, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + +class AdBreak(proto.Message): + r"""Ad break. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds for the ad break, relative to the + output file timeline. The default is ``0s``. + """ + + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + +class ElementaryStream(proto.Message): + r"""Encoding of an input file such as an audio, video, or text + track. Elementary streams must be packaged before + mapping and sharing between different output formats. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + key (str): + A unique key for this elementary stream. + video_stream (google.cloud.video.transcoder_v1.types.VideoStream): + Encoding of a video stream. + + This field is a member of `oneof`_ ``elementary_stream``. + audio_stream (google.cloud.video.transcoder_v1.types.AudioStream): + Encoding of an audio stream. + + This field is a member of `oneof`_ ``elementary_stream``. + text_stream (google.cloud.video.transcoder_v1.types.TextStream): + Encoding of a text stream. For example, + closed captions or subtitles. + + This field is a member of `oneof`_ ``elementary_stream``. 
+ """ + + key: str = proto.Field( + proto.STRING, + number=4, + ) + video_stream: 'VideoStream' = proto.Field( + proto.MESSAGE, + number=1, + oneof='elementary_stream', + message='VideoStream', + ) + audio_stream: 'AudioStream' = proto.Field( + proto.MESSAGE, + number=2, + oneof='elementary_stream', + message='AudioStream', + ) + text_stream: 'TextStream' = proto.Field( + proto.MESSAGE, + number=3, + oneof='elementary_stream', + message='TextStream', + ) + + +class MuxStream(proto.Message): + r"""Multiplexing settings for output stream. + + Attributes: + key (str): + A unique key for this multiplexed stream. HLS media + manifests will be named ``MuxStream.key`` with the ``.m3u8`` + extension suffix. + file_name (str): + The name of the generated file. The default is + ``MuxStream.key`` with the extension suffix corresponding to + the ``MuxStream.container``. + + Individual segments also have an incremental 10-digit + zero-padded suffix starting from 0 before the extension, + such as ``mux_stream0000000123.ts``. + container (str): + The container format. The default is ``mp4`` + + Supported container formats: + + - ``ts`` + - ``fmp4``- the corresponding file extension is ``.m4s`` + - ``mp4`` + - ``vtt`` + + See also: `Supported input and output + formats `__ + elementary_streams (MutableSequence[str]): + List of ``ElementaryStream.key``\ s multiplexed in this + stream. + segment_settings (google.cloud.video.transcoder_v1.types.SegmentSettings): + Segment settings for ``ts``, ``fmp4`` and ``vtt``. + """ + + key: str = proto.Field( + proto.STRING, + number=1, + ) + file_name: str = proto.Field( + proto.STRING, + number=2, + ) + container: str = proto.Field( + proto.STRING, + number=3, + ) + elementary_streams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + segment_settings: 'SegmentSettings' = proto.Field( + proto.MESSAGE, + number=5, + message='SegmentSettings', + ) + + +class Manifest(proto.Message): + r"""Manifest configuration. + + Attributes: + file_name (str): + The name of the generated file. The default is ``manifest`` + with the extension suffix corresponding to the + ``Manifest.type``. + type_ (google.cloud.video.transcoder_v1.types.Manifest.ManifestType): + Required. Type of the manifest, can be ``HLS`` or ``DASH``. + mux_streams (MutableSequence[str]): + Required. List of user given ``MuxStream.key``\ s that + should appear in this manifest. + + When ``Manifest.type`` is ``HLS``, a media manifest with + name ``MuxStream.key`` and ``.m3u8`` extension is generated + for each element of the ``Manifest.mux_streams``. + """ + class ManifestType(proto.Enum): + r"""The manifest type can be either ``HLS`` or ``DASH``.""" + MANIFEST_TYPE_UNSPECIFIED = 0 + HLS = 1 + DASH = 2 + + file_name: str = proto.Field( + proto.STRING, + number=1, + ) + type_: ManifestType = proto.Field( + proto.ENUM, + number=2, + enum=ManifestType, + ) + mux_streams: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class PubsubDestination(proto.Message): + r"""A Pub/Sub destination. + + Attributes: + topic (str): + The name of the Pub/Sub topic to publish job completion + notification to. For example: + ``projects/{project}/topics/{topic}``. + """ + + topic: str = proto.Field( + proto.STRING, + number=1, + ) + + +class SpriteSheet(proto.Message): + r"""Sprite sheet configuration. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. 
+ Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + format_ (str): + Format type. The default is ``jpeg``. + + Supported formats: + + - ``jpeg`` + file_prefix (str): + Required. File name prefix for the generated sprite sheets. + + Each sprite sheet has an incremental 10-digit zero-padded + suffix starting from 0 before the extension, such as + ``sprite_sheet0000000123.jpeg``. + sprite_width_pixels (int): + Required. The width of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] + field or the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] + field, but not both (the API will automatically calculate + the missing field). + + For portrait videos that contain horizontal ASR and rotation + metadata, provide the width, in pixels, per the horizontal + ASR. The API calculates the height per the horizontal ASR. + The API detects any rotation metadata and swaps the + requested height and width for the output. + sprite_height_pixels (int): + Required. The height of sprite in pixels. Must be an even + integer. To preserve the source aspect ratio, set the + [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] + field or the + [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] + field, but not both (the API will automatically calculate + the missing field). + + For portrait videos that contain horizontal ASR and rotation + metadata, provide the height, in pixels, per the horizontal + ASR. The API calculates the width per the horizontal ASR. + The API detects any rotation metadata and swaps the + requested height and width for the output. + column_count (int): + The maximum number of sprites per row in a + sprite sheet. The default is 0, which indicates + no maximum limit. + row_count (int): + The maximum number of rows per sprite sheet. + When the sprite sheet is full, a new sprite + sheet is created. The default is 0, which + indicates no maximum limit. + start_time_offset (google.protobuf.duration_pb2.Duration): + Start time in seconds, relative to the output file timeline. + Determines the first sprite to pick. The default is ``0s``. + end_time_offset (google.protobuf.duration_pb2.Duration): + End time in seconds, relative to the output file timeline. + When ``end_time_offset`` is not specified, the sprites are + generated until the end of the output file. + total_count (int): + Total number of sprites. Create the specified + number of sprites distributed evenly across the + timeline of the output media. The default is + 100. + + This field is a member of `oneof`_ ``extraction_strategy``. + interval (google.protobuf.duration_pb2.Duration): + Starting from ``0s``, create sprites at regular intervals. + Specify the interval value in seconds. + + This field is a member of `oneof`_ ``extraction_strategy``. + quality (int): + The quality of the generated sprite sheet. + Enter a value between 1 and 100, where 1 is the + lowest quality and 100 is the highest quality. + The default is 100. A high quality value + corresponds to a low image data compression + ratio. 
+ """ + + format_: str = proto.Field( + proto.STRING, + number=1, + ) + file_prefix: str = proto.Field( + proto.STRING, + number=2, + ) + sprite_width_pixels: int = proto.Field( + proto.INT32, + number=3, + ) + sprite_height_pixels: int = proto.Field( + proto.INT32, + number=4, + ) + column_count: int = proto.Field( + proto.INT32, + number=5, + ) + row_count: int = proto.Field( + proto.INT32, + number=6, + ) + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + total_count: int = proto.Field( + proto.INT32, + number=9, + oneof='extraction_strategy', + ) + interval: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + oneof='extraction_strategy', + message=duration_pb2.Duration, + ) + quality: int = proto.Field( + proto.INT32, + number=11, + ) + + +class Overlay(proto.Message): + r"""Overlay configuration. + + Attributes: + image (google.cloud.video.transcoder_v1.types.Overlay.Image): + Image overlay. + animations (MutableSequence[google.cloud.video.transcoder_v1.types.Overlay.Animation]): + List of Animations. The list should be + chronological, without any time overlap. + """ + class FadeType(proto.Enum): + r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" + FADE_TYPE_UNSPECIFIED = 0 + FADE_IN = 1 + FADE_OUT = 2 + + class NormalizedCoordinate(proto.Message): + r"""2D normalized coordinates. Default: ``{0.0, 0.0}`` + + Attributes: + x (float): + Normalized x coordinate. + y (float): + Normalized y coordinate. + """ + + x: float = proto.Field( + proto.DOUBLE, + number=1, + ) + y: float = proto.Field( + proto.DOUBLE, + number=2, + ) + + class Image(proto.Message): + r"""Overlaid jpeg image. + + Attributes: + uri (str): + Required. URI of the JPEG image in Cloud Storage. For + example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only + supported image type. + resolution (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized image resolution, based on output video + resolution. Valid values: ``0.0``–``1.0``. To respect the + original image aspect ratio, set either ``x`` or ``y`` to + ``0.0``. To use the original image resolution, set both + ``x`` and ``y`` to ``0.0``. + alpha (float): + Target image opacity. Valid values are from ``1.0`` (solid, + default) to ``0.0`` (transparent), exclusive. Set this to a + value greater than ``0.0``. + """ + + uri: str = proto.Field( + proto.STRING, + number=1, + ) + resolution: 'Overlay.NormalizedCoordinate' = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + alpha: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class AnimationStatic(proto.Message): + r"""Display static overlay object. + + Attributes: + xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start displaying the overlay + object, in seconds. 
Default: 0 + """ + + xy: 'Overlay.NormalizedCoordinate' = proto.Field( + proto.MESSAGE, + number=1, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, + ) + + class AnimationFade(proto.Message): + r"""Display overlay object with fade animation. + + Attributes: + fade_type (google.cloud.video.transcoder_v1.types.Overlay.FadeType): + Required. Type of fade animation: ``FADE_IN`` or + ``FADE_OUT``. + xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): + Normalized coordinates based on output video resolution. + Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left + coordinate of the overlay object. For example, use the x and + y coordinates {0,0} to position the top-left corner of the + overlay animation in the top-left corner of the output + video. + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to start the fade animation, in + seconds. Default: 0 + end_time_offset (google.protobuf.duration_pb2.Duration): + The time to end the fade animation, in seconds. Default: + ``start_time_offset`` + 1s + """ + + fade_type: 'Overlay.FadeType' = proto.Field( + proto.ENUM, + number=1, + enum='Overlay.FadeType', + ) + xy: 'Overlay.NormalizedCoordinate' = proto.Field( + proto.MESSAGE, + number=2, + message='Overlay.NormalizedCoordinate', + ) + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, + ) + end_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, + ) + + class AnimationEnd(proto.Message): + r"""End previous overlay animation from the video. Without + AnimationEnd, the overlay object will keep the state of previous + animation until the end of the video. + + Attributes: + start_time_offset (google.protobuf.duration_pb2.Duration): + The time to end overlay object, in seconds. + Default: 0 + """ + + start_time_offset: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + + class Animation(proto.Message): + r"""Animation types. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic): + Display static overlay object. + + This field is a member of `oneof`_ ``animation_type``. + animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade): + Display overlay object with fade animation. + + This field is a member of `oneof`_ ``animation_type``. + animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd): + End previous animation. + + This field is a member of `oneof`_ ``animation_type``. 
+ """ + + animation_static: 'Overlay.AnimationStatic' = proto.Field( + proto.MESSAGE, + number=1, + oneof='animation_type', + message='Overlay.AnimationStatic', + ) + animation_fade: 'Overlay.AnimationFade' = proto.Field( + proto.MESSAGE, + number=2, + oneof='animation_type', + message='Overlay.AnimationFade', + ) + animation_end: 'Overlay.AnimationEnd' = proto.Field( + proto.MESSAGE, + number=3, + oneof='animation_type', + message='Overlay.AnimationEnd', + ) + + image: Image = proto.Field( + proto.MESSAGE, + number=1, + message=Image, + ) + animations: MutableSequence[Animation] = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Animation, + ) + + +class PreprocessingConfig(proto.Message): + r"""Preprocessing configurations. + + Attributes: + color (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Color): + Color preprocessing configuration. + denoise (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Denoise): + Denoise preprocessing configuration. + deblock (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deblock): + Deblock preprocessing configuration. + audio (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Audio): + Audio preprocessing configuration. + crop (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Crop): + Specify the video cropping configuration. + pad (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Pad): + Specify the video pad filter configuration. + deinterlace (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace): + Specify the video deinterlace configuration. + """ + + class Color(proto.Message): + r"""Color preprocessing configuration. + + **Note:** This configuration is not supported. + + Attributes: + saturation (float): + Control color saturation of the video. Enter + a value between -1 and 1, where -1 is fully + desaturated and 1 is maximum saturation. 0 is no + change. The default is 0. + contrast (float): + Control black and white contrast of the + video. Enter a value between -1 and 1, where -1 + is minimum contrast and 1 is maximum contrast. 0 + is no change. The default is 0. + brightness (float): + Control brightness of the video. Enter a + value between -1 and 1, where -1 is minimum + brightness and 1 is maximum brightness. 0 is no + change. The default is 0. + """ + + saturation: float = proto.Field( + proto.DOUBLE, + number=1, + ) + contrast: float = proto.Field( + proto.DOUBLE, + number=2, + ) + brightness: float = proto.Field( + proto.DOUBLE, + number=3, + ) + + class Denoise(proto.Message): + r"""Denoise preprocessing configuration. + + **Note:** This configuration is not supported. + + Attributes: + strength (float): + Set strength of the denoise. Enter a value + between 0 and 1. The higher the value, the + smoother the image. 0 is no denoising. The + default is 0. + tune (str): + Set the denoiser mode. The default is ``standard``. + + Supported denoiser modes: + + - ``standard`` + - ``grain`` + """ + + strength: float = proto.Field( + proto.DOUBLE, + number=1, + ) + tune: str = proto.Field( + proto.STRING, + number=2, + ) + + class Deblock(proto.Message): + r"""Deblock preprocessing configuration. + + **Note:** This configuration is not supported. + + Attributes: + strength (float): + Set strength of the deblocker. Enter a value + between 0 and 1. The higher the value, the + stronger the block removal. 0 is no deblocking. + The default is 0. + enabled (bool): + Enable deblocker. The default is ``false``. 
+ """ + + strength: float = proto.Field( + proto.DOUBLE, + number=1, + ) + enabled: bool = proto.Field( + proto.BOOL, + number=2, + ) + + class Audio(proto.Message): + r"""Audio preprocessing configuration. + + Attributes: + lufs (float): + Specify audio loudness normalization in loudness units + relative to full scale (LUFS). Enter a value between -24 and + 0 (the default), where: + + - -24 is the Advanced Television Systems Committee (ATSC + A/85) standard + - -23 is the EU R128 broadcast standard + - -19 is the prior standard for online mono audio + - -18 is the ReplayGain standard + - -16 is the prior standard for stereo audio + - -14 is the new online audio standard recommended by + Spotify, as well as Amazon Echo + - 0 disables normalization + high_boost (bool): + Enable boosting high frequency components. The default is + ``false``. + + **Note:** This field is not supported. + low_boost (bool): + Enable boosting low frequency components. The default is + ``false``. + + **Note:** This field is not supported. + """ + + lufs: float = proto.Field( + proto.DOUBLE, + number=1, + ) + high_boost: bool = proto.Field( + proto.BOOL, + number=2, + ) + low_boost: bool = proto.Field( + proto.BOOL, + number=3, + ) + + class Crop(proto.Message): + r"""Video cropping configuration for the input video. The cropped + input video is scaled to match the output resolution. + + Attributes: + top_pixels (int): + The number of pixels to crop from the top. + The default is 0. + bottom_pixels (int): + The number of pixels to crop from the bottom. + The default is 0. + left_pixels (int): + The number of pixels to crop from the left. + The default is 0. + right_pixels (int): + The number of pixels to crop from the right. + The default is 0. + """ + + top_pixels: int = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels: int = proto.Field( + proto.INT32, + number=2, + ) + left_pixels: int = proto.Field( + proto.INT32, + number=3, + ) + right_pixels: int = proto.Field( + proto.INT32, + number=4, + ) + + class Pad(proto.Message): + r"""Pad filter configuration for the input video. The padded + input video is scaled after padding with black to match the + output resolution. + + Attributes: + top_pixels (int): + The number of pixels to add to the top. The + default is 0. + bottom_pixels (int): + The number of pixels to add to the bottom. + The default is 0. + left_pixels (int): + The number of pixels to add to the left. The + default is 0. + right_pixels (int): + The number of pixels to add to the right. The + default is 0. + """ + + top_pixels: int = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels: int = proto.Field( + proto.INT32, + number=2, + ) + left_pixels: int = proto.Field( + proto.INT32, + number=3, + ) + right_pixels: int = proto.Field( + proto.INT32, + number=4, + ) + + class Deinterlace(proto.Message): + r"""Deinterlace configuration for input video. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + yadif (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace.YadifConfig): + Specifies the Yet Another Deinterlacing + Filter Configuration. + + This field is a member of `oneof`_ ``deinterlacing_filter``. 
+ bwdif (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace.BwdifConfig): + Specifies the Bob Weaver Deinterlacing Filter + Configuration. + + This field is a member of `oneof`_ ``deinterlacing_filter``. + """ + + class YadifConfig(proto.Message): + r"""Yet Another Deinterlacing Filter Configuration. + + Attributes: + mode (str): + Specifies the deinterlacing mode to adopt. The default is + ``send_frame``. Supported values: + + - ``send_frame``: Output one frame for each frame + - ``send_field``: Output one frame for each field + disable_spatial_interlacing (bool): + Disable spacial interlacing. The default is ``false``. + parity (str): + The picture field parity assumed for the input interlaced + video. The default is ``auto``. Supported values: + + - ``tff``: Assume the top field is first + - ``bff``: Assume the bottom field is first + - ``auto``: Enable automatic detection of field parity + deinterlace_all_frames (bool): + Deinterlace all frames rather than just the frames + identified as interlaced. The default is ``false``. + """ + + mode: str = proto.Field( + proto.STRING, + number=1, + ) + disable_spatial_interlacing: bool = proto.Field( + proto.BOOL, + number=2, + ) + parity: str = proto.Field( + proto.STRING, + number=3, + ) + deinterlace_all_frames: bool = proto.Field( + proto.BOOL, + number=4, + ) + + class BwdifConfig(proto.Message): + r"""Bob Weaver Deinterlacing Filter Configuration. + + Attributes: + mode (str): + Specifies the deinterlacing mode to adopt. The default is + ``send_frame``. Supported values: + + - ``send_frame``: Output one frame for each frame + - ``send_field``: Output one frame for each field + parity (str): + The picture field parity assumed for the input interlaced + video. The default is ``auto``. Supported values: + + - ``tff``: Assume the top field is first + - ``bff``: Assume the bottom field is first + - ``auto``: Enable automatic detection of field parity + deinterlace_all_frames (bool): + Deinterlace all frames rather than just the frames + identified as interlaced. The default is ``false``. + """ + + mode: str = proto.Field( + proto.STRING, + number=1, + ) + parity: str = proto.Field( + proto.STRING, + number=2, + ) + deinterlace_all_frames: bool = proto.Field( + proto.BOOL, + number=3, + ) + + yadif: 'PreprocessingConfig.Deinterlace.YadifConfig' = proto.Field( + proto.MESSAGE, + number=1, + oneof='deinterlacing_filter', + message='PreprocessingConfig.Deinterlace.YadifConfig', + ) + bwdif: 'PreprocessingConfig.Deinterlace.BwdifConfig' = proto.Field( + proto.MESSAGE, + number=2, + oneof='deinterlacing_filter', + message='PreprocessingConfig.Deinterlace.BwdifConfig', + ) + + color: Color = proto.Field( + proto.MESSAGE, + number=1, + message=Color, + ) + denoise: Denoise = proto.Field( + proto.MESSAGE, + number=2, + message=Denoise, + ) + deblock: Deblock = proto.Field( + proto.MESSAGE, + number=3, + message=Deblock, + ) + audio: Audio = proto.Field( + proto.MESSAGE, + number=4, + message=Audio, + ) + crop: Crop = proto.Field( + proto.MESSAGE, + number=5, + message=Crop, + ) + pad: Pad = proto.Field( + proto.MESSAGE, + number=6, + message=Pad, + ) + deinterlace: Deinterlace = proto.Field( + proto.MESSAGE, + number=7, + message=Deinterlace, + ) + + +class VideoStream(proto.Message): + r"""Video stream resource. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings): + H264 codec settings. + + This field is a member of `oneof`_ ``codec_settings``. + h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings): + H265 codec settings. + + This field is a member of `oneof`_ ``codec_settings``. + vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings): + VP9 codec settings. + + This field is a member of `oneof`_ ``codec_settings``. + """ + + class H264CodecSettings(proto.Message): + r"""H264 codec settings. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the width, in + pixels, per the horizontal ASR. The API + calculates the height per the horizontal ASR. + The API detects any rotation metadata and swaps + the requested height and width for the output. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the height, in + pixels, per the horizontal ASR. The API + calculates the width per the horizontal ASR. The + API detects any rotation metadata and swaps the + requested height and width for the output. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. The minimum value is 1,000. The maximum + value is 800,000,000. + pixel_format (str): + Pixel format to use. The default is ``yuv420p``. + + Supported pixel formats: + + - ``yuv420p`` pixel format + - ``yuv422p`` pixel format + - ``yuv444p`` pixel format + - ``yuv420p10`` 10-bit HDR pixel format + - ``yuv422p10`` 10-bit HDR pixel format + - ``yuv444p10`` 10-bit HDR pixel format + - ``yuv420p12`` 12-bit HDR pixel format + - ``yuv422p12`` 12-bit HDR pixel format + - ``yuv444p12`` 12-bit HDR pixel format + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``vbr``. + + Supported rate control modes: + + - ``vbr`` - variable bitrate + - ``crf`` - constant rate factor + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + allow_open_gop (bool): + Specifies whether an open Group of Pictures (GOP) structure + should be allowed or not. The default is ``false``. 
+ gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + + This field is a member of `oneof`_ ``gop_mode``. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``3s``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + + This field is a member of `oneof`_ ``gop_mode``. + enable_two_pass (bool): + Use two-pass encoding strategy to achieve better video + quality. ``VideoStream.rate_control_mode`` must be ``vbr``. + The default is ``false``. + vbv_size_bits (int): + Size of the Video Buffering Verifier (VBV) buffer in bits. + Must be greater than zero. The default is equal to + ``VideoStream.bitrate_bps``. + vbv_fullness_bits (int): + Initial fullness of the Video Buffering Verifier (VBV) + buffer in bits. Must be greater than zero. The default is + equal to 90% of ``VideoStream.vbv_size_bits``. + entropy_coder (str): + The entropy coder to use. The default is ``cabac``. + + Supported entropy coders: + + - ``cavlc`` + - ``cabac`` + b_pyramid (bool): + Allow B-pyramid for reference frame selection. This may not + be supported on all decoders. The default is ``false``. + b_frame_count (int): + The number of consecutive B-frames. Must be greater than or + equal to zero. Must be less than + ``VideoStream.gop_frame_count`` if set. The default is 0. + aq_strength (float): + Specify the intensity of the adaptive + quantizer (AQ). Must be between 0 and 1, where 0 + disables the quantizer and 1 maximizes the + quantizer. A higher value equals a lower bitrate + but smoother image. The default is 0. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - ``baseline`` + - ``main`` + - ``high`` (default) + + The available options are + `FFmpeg-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``H264CodecSettings`` message. + tune (str): + Enforces the specified codec tune. The available options are + `FFmpeg-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``H264CodecSettings`` message. + preset (str): + Enforces the specified codec preset. The default is + ``veryfast``. The available options are + `FFmpeg-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``H264CodecSettings`` message. 
+ """ + + width_pixels: int = proto.Field( + proto.INT32, + number=1, + ) + height_pixels: int = proto.Field( + proto.INT32, + number=2, + ) + frame_rate: float = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps: int = proto.Field( + proto.INT32, + number=4, + ) + pixel_format: str = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode: str = proto.Field( + proto.STRING, + number=6, + ) + crf_level: int = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop: bool = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count: int = proto.Field( + proto.INT32, + number=9, + oneof='gop_mode', + ) + gop_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + enable_two_pass: bool = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits: int = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits: int = proto.Field( + proto.INT32, + number=13, + ) + entropy_coder: str = proto.Field( + proto.STRING, + number=14, + ) + b_pyramid: bool = proto.Field( + proto.BOOL, + number=15, + ) + b_frame_count: int = proto.Field( + proto.INT32, + number=16, + ) + aq_strength: float = proto.Field( + proto.DOUBLE, + number=17, + ) + profile: str = proto.Field( + proto.STRING, + number=18, + ) + tune: str = proto.Field( + proto.STRING, + number=19, + ) + preset: str = proto.Field( + proto.STRING, + number=20, + ) + + class H265CodecSettings(proto.Message): + r"""H265 codec settings. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the width, in + pixels, per the horizontal ASR. The API + calculates the height per the horizontal ASR. + The API detects any rotation metadata and swaps + the requested height and width for the output. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the height, in + pixels, per the horizontal ASR. The API + calculates the width per the horizontal ASR. The + API detects any rotation metadata and swaps the + requested height and width for the output. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. The minimum value is 1,000. The maximum + value is 800,000,000. + pixel_format (str): + Pixel format to use. The default is ``yuv420p``. 
+ + Supported pixel formats: + + - ``yuv420p`` pixel format + - ``yuv422p`` pixel format + - ``yuv444p`` pixel format + - ``yuv420p10`` 10-bit HDR pixel format + - ``yuv422p10`` 10-bit HDR pixel format + - ``yuv444p10`` 10-bit HDR pixel format + - ``yuv420p12`` 12-bit HDR pixel format + - ``yuv422p12`` 12-bit HDR pixel format + - ``yuv444p12`` 12-bit HDR pixel format + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``vbr``. + + Supported rate control modes: + + - ``vbr`` - variable bitrate + - ``crf`` - constant rate factor + crf_level (int): + Target CRF level. Must be between 10 and 36, + where 10 is the highest quality and 36 is the + most efficient compression. The default is 21. + allow_open_gop (bool): + Specifies whether an open Group of Pictures (GOP) structure + should be allowed or not. The default is ``false``. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + + This field is a member of `oneof`_ ``gop_mode``. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``3s``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + + This field is a member of `oneof`_ ``gop_mode``. + enable_two_pass (bool): + Use two-pass encoding strategy to achieve better video + quality. ``VideoStream.rate_control_mode`` must be ``vbr``. + The default is ``false``. + vbv_size_bits (int): + Size of the Video Buffering Verifier (VBV) buffer in bits. + Must be greater than zero. The default is equal to + ``VideoStream.bitrate_bps``. + vbv_fullness_bits (int): + Initial fullness of the Video Buffering Verifier (VBV) + buffer in bits. Must be greater than zero. The default is + equal to 90% of ``VideoStream.vbv_size_bits``. + b_pyramid (bool): + Allow B-pyramid for reference frame selection. This may not + be supported on all decoders. The default is ``false``. + b_frame_count (int): + The number of consecutive B-frames. Must be greater than or + equal to zero. Must be less than + ``VideoStream.gop_frame_count`` if set. The default is 0. + aq_strength (float): + Specify the intensity of the adaptive + quantizer (AQ). Must be between 0 and 1, where 0 + disables the quantizer and 1 maximizes the + quantizer. A higher value equals a lower bitrate + but smoother image. The default is 0. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - 8-bit profiles + + - ``main`` (default) + - ``main-intra`` + - ``mainstillpicture`` + + - 10-bit profiles + + - ``main10`` (default) + - ``main10-intra`` + - ``main422-10`` + - ``main422-10-intra`` + - ``main444-10`` + - ``main444-10-intra`` + + - 12-bit profiles + + - ``main12`` (default) + - ``main12-intra`` + - ``main422-12`` + - ``main422-12-intra`` + - ``main444-12`` + - ``main444-12-intra`` + + The available options are + `FFmpeg-compatible `__. Note + that certain values for this field may cause the transcoder + to override other fields you set in the + ``H265CodecSettings`` message. + tune (str): + Enforces the specified codec tune. The available options are + `FFmpeg-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``H265CodecSettings`` message. + preset (str): + Enforces the specified codec preset. The default is + ``veryfast``. 
The available options are + `FFmpeg-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``H265CodecSettings`` message. + """ + + width_pixels: int = proto.Field( + proto.INT32, + number=1, + ) + height_pixels: int = proto.Field( + proto.INT32, + number=2, + ) + frame_rate: float = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps: int = proto.Field( + proto.INT32, + number=4, + ) + pixel_format: str = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode: str = proto.Field( + proto.STRING, + number=6, + ) + crf_level: int = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop: bool = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count: int = proto.Field( + proto.INT32, + number=9, + oneof='gop_mode', + ) + gop_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=10, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + enable_two_pass: bool = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits: int = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits: int = proto.Field( + proto.INT32, + number=13, + ) + b_pyramid: bool = proto.Field( + proto.BOOL, + number=14, + ) + b_frame_count: int = proto.Field( + proto.INT32, + number=15, + ) + aq_strength: float = proto.Field( + proto.DOUBLE, + number=16, + ) + profile: str = proto.Field( + proto.STRING, + number=17, + ) + tune: str = proto.Field( + proto.STRING, + number=18, + ) + preset: str = proto.Field( + proto.STRING, + number=19, + ) + + class Vp9CodecSettings(proto.Message): + r"""VP9 codec settings. + + This message has `oneof`_ fields (mutually exclusive fields). + For each oneof, at most one member field can be set at the same time. + Setting any member of the oneof automatically clears all other + members. + + .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields + + Attributes: + width_pixels (int): + The width of the video in pixels. Must be an + even integer. When not specified, the width is + adjusted to match the specified height and input + aspect ratio. If both are omitted, the input + width is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the width, in + pixels, per the horizontal ASR. The API + calculates the height per the horizontal ASR. + The API detects any rotation metadata and swaps + the requested height and width for the output. + height_pixels (int): + The height of the video in pixels. Must be an + even integer. When not specified, the height is + adjusted to match the specified width and input + aspect ratio. If both are omitted, the input + height is used. + For portrait videos that contain horizontal ASR + and rotation metadata, provide the height, in + pixels, per the horizontal ASR. The API + calculates the width per the horizontal ASR. The + API detects any rotation metadata and swaps the + requested height and width for the output. + frame_rate (float): + Required. The target video frame rate in frames per second + (FPS). Must be less than or equal to 120. Will default to + the input frame rate if larger than the input frame rate. + The API will generate an output FPS that is divisible by the + input FPS, and smaller or equal to the target FPS. See + `Calculating frame + rate `__ + for more information. + bitrate_bps (int): + Required. The video bitrate in bits per + second. The minimum value is 1,000. The maximum + value is 480,000,000. 
+ pixel_format (str): + Pixel format to use. The default is ``yuv420p``. + + Supported pixel formats: + + - ``yuv420p`` pixel format + - ``yuv422p`` pixel format + - ``yuv444p`` pixel format + - ``yuv420p10`` 10-bit HDR pixel format + - ``yuv422p10`` 10-bit HDR pixel format + - ``yuv444p10`` 10-bit HDR pixel format + - ``yuv420p12`` 12-bit HDR pixel format + - ``yuv422p12`` 12-bit HDR pixel format + - ``yuv444p12`` 12-bit HDR pixel format + rate_control_mode (str): + Specify the ``rate_control_mode``. The default is ``vbr``. + + Supported rate control modes: + + - ``vbr`` - variable bitrate + crf_level (int): + Target CRF level. Must be between 10 and 36, where 10 is the + highest quality and 36 is the most efficient compression. + The default is 21. + + **Note:** This field is not supported. + gop_frame_count (int): + Select the GOP size based on the specified + frame count. Must be greater than zero. + + This field is a member of `oneof`_ ``gop_mode``. + gop_duration (google.protobuf.duration_pb2.Duration): + Select the GOP size based on the specified duration. The + default is ``3s``. Note that ``gopDuration`` must be less + than or equal to ```segmentDuration`` <#SegmentSettings>`__, + and ```segmentDuration`` <#SegmentSettings>`__ must be + divisible by ``gopDuration``. + + This field is a member of `oneof`_ ``gop_mode``. + profile (str): + Enforces the specified codec profile. The following profiles + are supported: + + - ``profile0`` (default) + - ``profile1`` + - ``profile2`` + - ``profile3`` + + The available options are + `WebM-compatible `__. + Note that certain values for this field may cause the + transcoder to override other fields you set in the + ``Vp9CodecSettings`` message. + """ + + width_pixels: int = proto.Field( + proto.INT32, + number=1, + ) + height_pixels: int = proto.Field( + proto.INT32, + number=2, + ) + frame_rate: float = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps: int = proto.Field( + proto.INT32, + number=4, + ) + pixel_format: str = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode: str = proto.Field( + proto.STRING, + number=6, + ) + crf_level: int = proto.Field( + proto.INT32, + number=7, + ) + gop_frame_count: int = proto.Field( + proto.INT32, + number=8, + oneof='gop_mode', + ) + gop_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=9, + oneof='gop_mode', + message=duration_pb2.Duration, + ) + profile: str = proto.Field( + proto.STRING, + number=10, + ) + + h264: H264CodecSettings = proto.Field( + proto.MESSAGE, + number=1, + oneof='codec_settings', + message=H264CodecSettings, + ) + h265: H265CodecSettings = proto.Field( + proto.MESSAGE, + number=2, + oneof='codec_settings', + message=H265CodecSettings, + ) + vp9: Vp9CodecSettings = proto.Field( + proto.MESSAGE, + number=3, + oneof='codec_settings', + message=Vp9CodecSettings, + ) + + +class AudioStream(proto.Message): + r"""Audio stream resource. + + Attributes: + codec (str): + The codec for this audio stream. The default is ``aac``. + + Supported audio codecs: + + - ``aac`` + - ``aac-he`` + - ``aac-he-v2`` + - ``mp3`` + - ``ac3`` + - ``eac3`` + bitrate_bps (int): + Required. Audio bitrate in bits per second. + Must be between 1 and 10,000,000. + channel_count (int): + Number of audio channels. Must be between 1 + and 6. The default is 2. + channel_layout (MutableSequence[str]): + A list of channel names specifying layout of the audio + channels. 
This only affects the metadata embedded in the + container headers, if supported by the specified format. The + default is ``["fl", "fr"]``. + + Supported channel names: + + - ``fl`` - Front left channel + - ``fr`` - Front right channel + - ``sl`` - Side left channel + - ``sr`` - Side right channel + - ``fc`` - Front center channel + - ``lfe`` - Low frequency + mapping_ (MutableSequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): + The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + sample_rate_hertz (int): + The audio sample rate in Hertz. The default + is 48000 Hertz. + """ + + class AudioMapping(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with audio + ``EditAtom.inputs``. + + Attributes: + atom_key (str): + Required. The ``EditAtom.key`` that references the atom with + audio inputs in the ``Job.edit_list``. + input_key (str): + Required. The ``Input.key`` that identifies the input file. + input_track (int): + Required. The zero-based index of the track + in the input file. + input_channel (int): + Required. The zero-based index of the channel + in the input audio stream. + output_channel (int): + Required. The zero-based index of the channel + in the output audio stream. + gain_db (float): + Audio volume control in dB. Negative values + decrease volume, positive values increase. The + default is 0. + """ + + atom_key: str = proto.Field( + proto.STRING, + number=1, + ) + input_key: str = proto.Field( + proto.STRING, + number=2, + ) + input_track: int = proto.Field( + proto.INT32, + number=3, + ) + input_channel: int = proto.Field( + proto.INT32, + number=4, + ) + output_channel: int = proto.Field( + proto.INT32, + number=5, + ) + gain_db: float = proto.Field( + proto.DOUBLE, + number=6, + ) + + codec: str = proto.Field( + proto.STRING, + number=1, + ) + bitrate_bps: int = proto.Field( + proto.INT32, + number=2, + ) + channel_count: int = proto.Field( + proto.INT32, + number=3, + ) + channel_layout: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=4, + ) + mapping_: MutableSequence[AudioMapping] = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=AudioMapping, + ) + sample_rate_hertz: int = proto.Field( + proto.INT32, + number=6, + ) + + +class TextStream(proto.Message): + r"""Encoding of a text stream. For example, closed captions or + subtitles. + + Attributes: + codec (str): + The codec for this text stream. The default is ``webvtt``. + + Supported text codecs: + + - ``srt`` + - ``ttml`` + - ``cea608`` + - ``cea708`` + - ``webvtt`` + mapping_ (MutableSequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): + The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + """ + + class TextMapping(proto.Message): + r"""The mapping for the ``Job.edit_list`` atoms with text + ``EditAtom.inputs``. + + Attributes: + atom_key (str): + Required. The ``EditAtom.key`` that references atom with + text inputs in the ``Job.edit_list``. + input_key (str): + Required. The ``Input.key`` that identifies the input file. + input_track (int): + Required. The zero-based index of the track + in the input file. 
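            For illustration only (keys and indexes are hypothetical), a single
            text mapping entry could be constructed as::

                transcoder_v1.TextStream.TextMapping(
                    atom_key="atom0",
                    input_key="input0",
                    input_track=0,
                )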
+ """ + + atom_key: str = proto.Field( + proto.STRING, + number=1, + ) + input_key: str = proto.Field( + proto.STRING, + number=2, + ) + input_track: int = proto.Field( + proto.INT32, + number=3, + ) + + codec: str = proto.Field( + proto.STRING, + number=1, + ) + mapping_: MutableSequence[TextMapping] = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TextMapping, + ) + + +class SegmentSettings(proto.Message): + r"""Segment settings for ``ts``, ``fmp4`` and ``vtt``. + + Attributes: + segment_duration (google.protobuf.duration_pb2.Duration): + Duration of the segments in seconds. The default is + ``6.0s``. Note that ``segmentDuration`` must be greater than + or equal to ```gopDuration`` <#videostream>`__, and + ``segmentDuration`` must be divisible by + ```gopDuration`` <#videostream>`__. + individual_segments (bool): + Required. Create an individual segment file. The default is + ``false``. + """ + + segment_duration: duration_pb2.Duration = proto.Field( + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + individual_segments: bool = proto.Field( + proto.BOOL, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py new file mode 100644 index 0000000..2ad601f --- /dev/null +++ b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py @@ -0,0 +1,325 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +from typing import MutableMapping, MutableSequence + +import proto # type: ignore + +from google.cloud.video.transcoder_v1.types import resources + + +__protobuf__ = proto.module( + package='google.cloud.video.transcoder.v1', + manifest={ + 'CreateJobRequest', + 'ListJobsRequest', + 'GetJobRequest', + 'DeleteJobRequest', + 'ListJobsResponse', + 'CreateJobTemplateRequest', + 'ListJobTemplatesRequest', + 'GetJobTemplateRequest', + 'DeleteJobTemplateRequest', + 'ListJobTemplatesResponse', + }, +) + + +class CreateJobRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJob``. + + Attributes: + parent (str): + Required. The parent location to create and process this + job. Format: ``projects/{project}/locations/{location}`` + job (google.cloud.video.transcoder_v1.types.Job): + Required. Parameters for creating transcoding + job. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job: resources.Job = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Job, + ) + + +class ListJobsRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobs``. The parent + location from which to retrieve the collection of jobs. + + Attributes: + parent (str): + Required. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. 
+ filter (str): + The filter expression, following the syntax + outlined in https://google.aip.dev/160. + order_by (str): + One or more fields to compare and use to sort + the output. See + https://google.aip.dev/132#ordering. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetJobRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJob``. + + Attributes: + name (str): + Required. The name of the job to retrieve. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJob``. + + Attributes: + name (str): + Required. The name of the job to delete. Format: + ``projects/{project}/locations/{location}/jobs/{job}`` + allow_missing (bool): + If set to true, and the job is not found, the + request will succeed but no action will be taken + on the server. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListJobsResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobs``. + + Attributes: + jobs (MutableSequence[google.cloud.video.transcoder_v1.types.Job]): + List of jobs in the specified region. + next_page_token (str): + The pagination token. + unreachable (MutableSequence[str]): + List of regions that could not be reached. + """ + + @property + def raw_page(self): + return self + + jobs: MutableSequence[resources.Job] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Job, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +class CreateJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.CreateJobTemplate``. + + Attributes: + parent (str): + Required. The parent location to create this job template. + Format: ``projects/{project}/locations/{location}`` + job_template (google.cloud.video.transcoder_v1.types.JobTemplate): + Required. Parameters for creating job + template. + job_template_id (str): + Required. The ID to use for the job template, which will + become the final component of the job template's resource + name. + + This value should be 4-63 characters, and valid characters + must match the regular expression + ``[a-zA-Z][a-zA-Z0-9_-]*``. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + job_template: resources.JobTemplate = proto.Field( + proto.MESSAGE, + number=2, + message=resources.JobTemplate, + ) + job_template_id: str = proto.Field( + proto.STRING, + number=3, + ) + + +class ListJobTemplatesRequest(proto.Message): + r"""Request message for ``TranscoderService.ListJobTemplates``. + + Attributes: + parent (str): + Required. The parent location from which to retrieve the + collection of job templates. Format: + ``projects/{project}/locations/{location}`` + page_size (int): + The maximum number of items to return. + page_token (str): + The ``next_page_token`` value returned from a previous List + request, if any. + filter (str): + The filter expression, following the syntax + outlined in https://google.aip.dev/160. 
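            For illustration only (the filter expression is hypothetical; see
            the service documentation for the fields it accepts), a filtered
            request might be built as::

                transcoder_v1.ListJobTemplatesRequest(
                    parent="projects/my-project/locations/us-central1",
                    page_size=25,
                    filter='name:"hd"',
                )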
+ order_by (str): + One or more fields to compare and use to sort + the output. See + https://google.aip.dev/132#ordering. + """ + + parent: str = proto.Field( + proto.STRING, + number=1, + ) + page_size: int = proto.Field( + proto.INT32, + number=2, + ) + page_token: str = proto.Field( + proto.STRING, + number=3, + ) + filter: str = proto.Field( + proto.STRING, + number=4, + ) + order_by: str = proto.Field( + proto.STRING, + number=5, + ) + + +class GetJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.GetJobTemplate``. + + Attributes: + name (str): + Required. The name of the job template to retrieve. Format: + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + + +class DeleteJobTemplateRequest(proto.Message): + r"""Request message for ``TranscoderService.DeleteJobTemplate``. + + Attributes: + name (str): + Required. The name of the job template to delete. + ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` + allow_missing (bool): + If set to true, and the job template is not + found, the request will succeed but no action + will be taken on the server. + """ + + name: str = proto.Field( + proto.STRING, + number=1, + ) + allow_missing: bool = proto.Field( + proto.BOOL, + number=2, + ) + + +class ListJobTemplatesResponse(proto.Message): + r"""Response message for ``TranscoderService.ListJobTemplates``. + + Attributes: + job_templates (MutableSequence[google.cloud.video.transcoder_v1.types.JobTemplate]): + List of job templates in the specified + region. + next_page_token (str): + The pagination token. + unreachable (MutableSequence[str]): + List of regions that could not be reached. + """ + + @property + def raw_page(self): + return self + + job_templates: MutableSequence[resources.JobTemplate] = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.JobTemplate, + ) + next_page_token: str = proto.Field( + proto.STRING, + number=2, + ) + unreachable: MutableSequence[str] = proto.RepeatedField( + proto.STRING, + number=3, + ) + + +__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini new file mode 100644 index 0000000..574c5ae --- /dev/null +++ b/owl-bot-staging/v1/mypy.ini @@ -0,0 +1,3 @@ +[mypy] +python_version = 3.7 +namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py new file mode 100644 index 0000000..974c78e --- /dev/null +++ b/owl-bot-staging/v1/noxfile.py @@ -0,0 +1,184 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
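# Illustrative usage only (assumes nox is installed); the sessions defined
# below are typically run from this directory, for example:
#
#   nox -s unit-3.11   # unit tests on Python 3.11
#   nox -s mypy-3.11   # type checks on Python 3.11
#   nox -s docs        # build the Sphinx docs
#
# Parametrized session names follow the ALL_PYTHON versions declared below.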
+# +import os +import pathlib +import shutil +import subprocess +import sys + + +import nox # type: ignore + +ALL_PYTHON = [ + "3.7", + "3.8", + "3.9", + "3.10", + "3.11", +] + +CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() + +LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" +PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") + +BLACK_VERSION = "black==22.3.0" +BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] +DEFAULT_PYTHON_VERSION = "3.11" + +nox.sessions = [ + "unit", + "cover", + "mypy", + "check_lower_bounds" + # exclude update_lower_bounds from default + "docs", + "blacken", + "lint", + "lint_setup_py", +] + +@nox.session(python=ALL_PYTHON) +def unit(session): + """Run the unit test suite.""" + + session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') + session.install('-e', '.') + + session.run( + 'py.test', + '--quiet', + '--cov=google/cloud/video/transcoder_v1/', + '--cov=tests/', + '--cov-config=.coveragerc', + '--cov-report=term', + '--cov-report=html', + os.path.join('tests', 'unit', ''.join(session.posargs)) + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def cover(session): + """Run the final coverage report. + This outputs the coverage report aggregating coverage from the unit + test runs (not system test runs), and then erases coverage data. + """ + session.install("coverage", "pytest-cov") + session.run("coverage", "report", "--show-missing", "--fail-under=100") + + session.run("coverage", "erase") + + +@nox.session(python=ALL_PYTHON) +def mypy(session): + """Run the type checker.""" + session.install( + 'mypy', + 'types-requests', + 'types-protobuf' + ) + session.install('.') + session.run( + 'mypy', + '--explicit-package-bases', + 'google', + ) + + +@nox.session +def update_lower_bounds(session): + """Update lower bounds in constraints.txt to match setup.py""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'update', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + + +@nox.session +def check_lower_bounds(session): + """Check lower bounds in setup.py are reflected in constraints file""" + session.install('google-cloud-testutils') + session.install('.') + + session.run( + 'lower-bound-checker', + 'check', + '--package-name', + PACKAGE_NAME, + '--constraints-file', + str(LOWER_BOUND_CONSTRAINTS_FILE), + ) + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def docs(session): + """Build the docs for this library.""" + + session.install("-e", ".") + session.install("sphinx==4.0.1", "alabaster", "recommonmark") + + shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) + session.run( + "sphinx-build", + "-W", # warnings as errors + "-T", # show full traceback on exception + "-N", # no colors + "-b", + "html", + "-d", + os.path.join("docs", "_build", "doctrees", ""), + os.path.join("docs", ""), + os.path.join("docs", "_build", "html", ""), + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint(session): + """Run linters. + + Returns a failure if the linters find linting errors or sufficiently + serious code quality issues. + """ + session.install("flake8", BLACK_VERSION) + session.run( + "black", + "--check", + *BLACK_PATHS, + ) + session.run("flake8", "google", "tests", "samples") + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def blacken(session): + """Run black. 
Format code to uniform standard.""" + session.install(BLACK_VERSION) + session.run( + "black", + *BLACK_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def lint_setup_py(session): + """Verify that setup.py is valid (including RST check).""" + session.install("docutils", "pygments") + session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json new file mode 100644 index 0000000..2c7572e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json @@ -0,0 +1,1315 @@ +{ + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.video.transcoder.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-video-transcoder", + "version": "0.1.0" + }, + "snippets": [ + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "CreateJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_template", + "type": "google.cloud.video.transcoder_v1.types.JobTemplate" + }, + { + "name": "job_template_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "create_job_template" + }, + "description": "Sample for CreateJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_create_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "CreateJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": 
"google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_template", + "type": "google.cloud.video.transcoder_v1.types.JobTemplate" + }, + { + "name": "job_template_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "create_job_template" + }, + "description": "Sample for CreateJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 46, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 49, + "start": 47, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.video.transcoder_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "transcoder_v1_generated_transcoder_service_create_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_async", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", + "service": { + "fullName": 
"google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "CreateJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.video.transcoder_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "create_job" + }, + "description": "Sample for CreateJob", + "file": "transcoder_v1_generated_transcoder_service_create_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_sync", + "segments": [ + { + "end": 55, + "start": 27, + "type": "FULL" + }, + { + "end": 55, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 49, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 52, + "start": 50, + "type": "REQUEST_EXECUTION" + }, + { + "end": 56, + "start": 53, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "DeleteJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_template" + }, + "description": "Sample for DeleteJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + 
"shortName": "TranscoderService" + }, + "shortName": "DeleteJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_template" + }, + "description": "Sample for DeleteJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "DeleteJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" + }, + "description": "Sample for DeleteJob", + "file": "transcoder_v1_generated_transcoder_service_delete_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_async", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "DeleteJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": 
"google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" + }, + "description": "Sample for DeleteJob", + "file": "transcoder_v1_generated_transcoder_service_delete_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_sync", + "segments": [ + { + "end": 49, + "start": 27, + "type": "FULL" + }, + { + "end": 49, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 50, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "GetJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "get_job_template" + }, + "description": "Sample for GetJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_get_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_template_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job_template", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "GetJobTemplate" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "get_job_template" + }, + "description": "Sample for GetJobTemplate", + "file": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": "transcoder_v1_generated_transcoder_service_get_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_async", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "GetJob" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "get_job" + }, + "description": "Sample for GetJob", + "file": 
"transcoder_v1_generated_transcoder_service_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_sync", + "segments": [ + { + "end": 51, + "start": 27, + "type": "FULL" + }, + { + "end": 51, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 52, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_job_templates", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "ListJobTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager", + "shortName": "list_job_templates" + }, + "description": "Sample for ListJobTemplates", + "file": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_job_templates", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "ListJobTemplates" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager", + "shortName": "list_job_templates" + }, + 
"description": "Sample for ListJobTemplates", + "file": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py" + }, + { + "canonical": true, + "clientMethod": { + "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_jobs", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": "transcoder_v1_generated_transcoder_service_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_async", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_list_jobs_async.py" + }, + { + "canonical": true, + "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_jobs", + "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", + "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", + "shortName": "TranscoderService" + }, + "shortName": "ListJobs" + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager", + "shortName": "list_jobs" + }, + "description": "Sample for ListJobs", + "file": 
"transcoder_v1_generated_transcoder_service_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", + "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_sync", + "segments": [ + { + "end": 52, + "start": 27, + "type": "FULL" + }, + { + "end": 52, + "start": 27, + "type": "SHORT" + }, + { + "end": 40, + "start": 38, + "type": "CLIENT_INITIALIZATION" + }, + { + "end": 45, + "start": 41, + "type": "REQUEST_INITIALIZATION" + }, + { + "end": 48, + "start": 46, + "type": "REQUEST_EXECUTION" + }, + { + "end": 53, + "start": 49, + "type": "RESPONSE_HANDLING" + } + ], + "title": "transcoder_v1_generated_transcoder_service_list_jobs_sync.py" + } + ] +} diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py new file mode 100644 index 0000000..521cfc2 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_CreateJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = await client.create_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_CreateJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py new file mode 100644 index 0000000..739f5e3 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py @@ -0,0 +1,56 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_CreateJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_create_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + job = transcoder_v1.Job() + job.template_id = "template_id_value" + + request = transcoder_v1.CreateJobRequest( + parent="parent_value", + job=job, + ) + + # Make the request + response = client.create_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_CreateJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py new file mode 100644 index 0000000..609d3ae --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_CreateJobTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = await client.create_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_CreateJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py new file mode 100644 index 0000000..8c47c3c --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for CreateJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_create_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.CreateJobTemplateRequest( + parent="parent_value", + job_template_id="job_template_id_value", + ) + + # Make the request + response = client.create_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py new file mode 100644 index 0000000..60c9709 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_DeleteJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + await client.delete_job(request=request) + + +# [END transcoder_v1_generated_TranscoderService_DeleteJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py new file mode 100644 index 0000000..5735b2e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_DeleteJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_delete_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobRequest( + name="name_value", + ) + + # Make the request + client.delete_job(request=request) + + +# [END transcoder_v1_generated_TranscoderService_DeleteJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py new file mode 100644 index 0000000..1168d4b --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + await client.delete_job_template(request=request) + + +# [END transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py new file mode 100644 index 0000000..2a9924e --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py @@ -0,0 +1,50 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for DeleteJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_delete_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.DeleteJobTemplateRequest( + name="name_value", + ) + + # Make the request + client.delete_job_template(request=request) + + +# [END transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py new file mode 100644 index 0000000..41e2bb5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_GetJob_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_GetJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py new file mode 100644 index 0000000..dbfed87 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJob +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_GetJob_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_get_job(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobRequest( + name="name_value", + ) + + # Make the request + response = client.get_job(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_GetJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py new file mode 100644 index 0000000..4360a61 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_GetJobTemplate_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = await client.get_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_GetJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py new file mode 100644 index 0000000..242fd23 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py @@ -0,0 +1,52 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for GetJobTemplate +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_GetJobTemplate_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_get_job_template(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.GetJobTemplateRequest( + name="name_value", + ) + + # Make the request + response = client.get_job_template(request=request) + + # Handle the response + print(response) + +# [END transcoder_v1_generated_TranscoderService_GetJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py new file mode 100644 index 0000000..f1621bb --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_ListJobTemplates_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END transcoder_v1_generated_TranscoderService_ListJobTemplates_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py new file mode 100644 index 0000000..6a1b7d5 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobTemplates +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_ListJobTemplates_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_list_job_templates(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobTemplatesRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_job_templates(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END transcoder_v1_generated_TranscoderService_ListJobTemplates_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py new file mode 100644 index 0000000..a559323 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_ListJobs_async] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. 
+# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +async def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceAsyncClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + async for response in page_result: + print(response) + +# [END transcoder_v1_generated_TranscoderService_ListJobs_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py new file mode 100644 index 0000000..badb770 --- /dev/null +++ b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +# Generated code. DO NOT EDIT! +# +# Snippet for ListJobs +# NOTE: This snippet has been automatically generated for illustrative purposes only. +# It may require modifications to work in your environment. + +# To install the latest published package dependency, execute the following: +# python3 -m pip install google-cloud-video-transcoder + + +# [START transcoder_v1_generated_TranscoderService_ListJobs_sync] +# This snippet has been automatically generated and should be regarded as a +# code template only. +# It will require modifications to work: +# - It may require correct/in-range values for request initialization. +# - It may require specifying regional endpoints when creating the service +# client as shown in: +# https://googleapis.dev/python/google-api-core/latest/client_options.html +from google.cloud.video import transcoder_v1 + + +def sample_list_jobs(): + # Create a client + client = transcoder_v1.TranscoderServiceClient() + + # Initialize request argument(s) + request = transcoder_v1.ListJobsRequest( + parent="parent_value", + ) + + # Make the request + page_result = client.list_jobs(request=request) + + # Handle the response + for response in page_result: + print(response) + +# [END transcoder_v1_generated_TranscoderService_ListJobs_sync] diff --git a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py new file mode 100644 index 0000000..667e582 --- /dev/null +++ b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py @@ -0,0 +1,183 @@ +#! /usr/bin/env python3 +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# +import argparse +import os +import libcst as cst +import pathlib +import sys +from typing import (Any, Callable, Dict, List, Sequence, Tuple) + + +def partition( + predicate: Callable[[Any], bool], + iterator: Sequence[Any] +) -> Tuple[List[Any], List[Any]]: + """A stable, out-of-place partition.""" + results = ([], []) + + for i in iterator: + results[int(predicate(i))].append(i) + + # Returns trueList, falseList + return results[1], results[0] + + +class transcoderCallTransformer(cst.CSTTransformer): + CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') + METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { + 'create_job': ('parent', 'job', ), + 'create_job_template': ('parent', 'job_template', 'job_template_id', ), + 'delete_job': ('name', 'allow_missing', ), + 'delete_job_template': ('name', 'allow_missing', ), + 'get_job': ('name', ), + 'get_job_template': ('name', ), + 'list_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + 'list_job_templates': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), + } + + def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: + try: + key = original.func.attr.value + kword_params = self.METHOD_TO_PARAMS[key] + except (AttributeError, KeyError): + # Either not a method from the API or too convoluted to be sure. + return updated + + # If the existing code is valid, keyword args come after positional args. + # Therefore, all positional args must map to the first parameters. + args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) + if any(k.keyword.value == "request" for k in kwargs): + # We've already fixed this file, don't fix it again. + return updated + + kwargs, ctrl_kwargs = partition( + lambda a: a.keyword.value not in self.CTRL_PARAMS, + kwargs + ) + + args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] + ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) + for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) + + request_arg = cst.Arg( + value=cst.Dict([ + cst.DictElement( + cst.SimpleString("'{}'".format(name)), +cst.Element(value=arg.value) + ) + # Note: the args + kwargs looks silly, but keep in mind that + # the control parameters had to be stripped out, and that + # those could have been passed positionally or by keyword. + for name, arg in zip(kword_params, args + kwargs)]), + keyword=cst.Name("request") + ) + + return updated.with_changes( + args=[request_arg] + ctrl_kwargs + ) + + +def fix_files( + in_dir: pathlib.Path, + out_dir: pathlib.Path, + *, + transformer=transcoderCallTransformer(), +): + """Duplicate the input dir to the output dir, fixing file method calls. + + Preconditions: + * in_dir is a real directory + * out_dir is a real, empty directory + """ + pyfile_gen = ( + pathlib.Path(os.path.join(root, f)) + for root, _, files in os.walk(in_dir) + for f in files if os.path.splitext(f)[1] == ".py" + ) + + for fpath in pyfile_gen: + with open(fpath, 'r') as f: + src = f.read() + + # Parse the code and insert method call fixes. 
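+            # Illustrative example of the rewrite applied here (the resource path is
+            # hypothetical, not taken from any real project):
+            #   before:  client.get_job("projects/p/locations/l/jobs/j")
+            #   after:   client.get_job(request={'name': "projects/p/locations/l/jobs/j"})
+            # Control parameters (retry, timeout, metadata) are left as keyword arguments
+            # rather than being folded into the request dict.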
+ tree = cst.parse_module(src) + updated = tree.visit(transformer) + + # Create the path and directory structure for the new file. + updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) + updated_path.parent.mkdir(parents=True, exist_ok=True) + + # Generate the updated source file at the corresponding path. + with open(updated_path, 'w') as f: + f.write(updated.code) + + +if __name__ == '__main__': + parser = argparse.ArgumentParser( + description="""Fix up source that uses the transcoder client library. + +The existing sources are NOT overwritten but are copied to output_dir with changes made. + +Note: This tool operates at a best-effort level at converting positional + parameters in client method calls to keyword based parameters. + Cases where it WILL FAIL include + A) * or ** expansion in a method call. + B) Calls via function or method alias (includes free function calls) + C) Indirect or dispatched calls (e.g. the method is looked up dynamically) + + These all constitute false negatives. The tool will also detect false + positives when an API method shares a name with another method. +""") + parser.add_argument( + '-d', + '--input-directory', + required=True, + dest='input_dir', + help='the input directory to walk for python files to fix up', + ) + parser.add_argument( + '-o', + '--output-directory', + required=True, + dest='output_dir', + help='the directory to output files fixed via un-flattening', + ) + args = parser.parse_args() + input_dir = pathlib.Path(args.input_dir) + output_dir = pathlib.Path(args.output_dir) + if not input_dir.is_dir(): + print( + f"input directory '{input_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if not output_dir.is_dir(): + print( + f"output directory '{output_dir}' does not exist or is not a directory", + file=sys.stderr, + ) + sys.exit(-1) + + if os.listdir(output_dir): + print( + f"output directory '{output_dir}' is not empty", + file=sys.stderr, + ) + sys.exit(-1) + + fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py new file mode 100644 index 0000000..b2a3a81 --- /dev/null +++ b/owl-bot-staging/v1/setup.py @@ -0,0 +1,92 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import io +import os + +import setuptools # type: ignore + +package_root = os.path.abspath(os.path.dirname(__file__)) + +name = 'google-cloud-video-transcoder' + + +description = "Google Cloud Video Transcoder API client library" + +version = {} +with open(os.path.join(package_root, 'google/cloud/video/transcoder/gapic_version.py')) as fp: + exec(fp.read(), version) +version = version["__version__"] + +if version[0] == "0": + release_status = "Development Status :: 4 - Beta" +else: + release_status = "Development Status :: 5 - Production/Stable" + +dependencies = [ + "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", + "proto-plus >= 1.22.0, <2.0.0dev", + "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", + "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", +] +url = "https://github.com/googleapis/python-video-transcoder" + +package_root = os.path.abspath(os.path.dirname(__file__)) + +readme_filename = os.path.join(package_root, "README.rst") +with io.open(readme_filename, encoding="utf-8") as readme_file: + readme = readme_file.read() + +packages = [ + package + for package in setuptools.PEP420PackageFinder.find() + if package.startswith("google") +] + +namespaces = ["google"] +if "google.cloud" in packages: + namespaces.append("google.cloud") + +setuptools.setup( + name=name, + version=version, + description=description, + long_description=readme, + author="Google LLC", + author_email="googleapis-packages@google.com", + license="Apache 2.0", + url=url, + classifiers=[ + release_status, + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Operating System :: OS Independent", + "Topic :: Internet", + ], + platforms="Posix; MacOS X; Windows", + packages=packages, + python_requires=">=3.7", + namespace_packages=namespaces, + install_requires=dependencies, + include_package_data=True, + zip_safe=False, +) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.10.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.11.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/owl-bot-staging/v1/testing/constraints-3.12.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.12.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. 
+google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt new file mode 100644 index 0000000..6c44adf --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.7.txt @@ -0,0 +1,9 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List all library dependencies and extras in this file. +# Pin the version to the lower bound. +# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", +# Then this file should have google-cloud-foo==1.14.0 +google-api-core==1.34.0 +proto-plus==1.22.0 +protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.8.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt new file mode 100644 index 0000000..ed7f9ae --- /dev/null +++ b/owl-bot-staging/v1/testing/constraints-3.9.txt @@ -0,0 +1,6 @@ +# -*- coding: utf-8 -*- +# This constraints file is required for unit tests. +# List all library dependencies and extras in this file. +google-api-core +proto-plus +protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py new file mode 100644 index 0000000..231bc12 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py @@ -0,0 +1,16 @@ + +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py new file mode 100644 index 0000000..e714ff7 --- /dev/null +++ b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -0,0 +1,3484 @@ +# -*- coding: utf-8 -*- +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# +import os +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock # pragma: NO COVER +except ImportError: # pragma: NO COVER + import mock + +import grpc +from grpc.experimental import aio +import math +import pytest +from proto.marshal.rules.dates import DurationRule, TimestampRule +from proto.marshal.rules import wrappers + +from google.api_core import client_options +from google.api_core import exceptions as core_exceptions +from google.api_core import gapic_v1 +from google.api_core import grpc_helpers +from google.api_core import grpc_helpers_async +from google.api_core import path_template +from google.auth import credentials as ga_credentials +from google.auth.exceptions import MutualTLSChannelError +from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceAsyncClient +from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceClient +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.services.transcoder_service import transports +from google.cloud.video.transcoder_v1.types import resources +from google.cloud.video.transcoder_v1.types import services +from google.oauth2 import service_account +from google.protobuf import any_pb2 # type: ignore +from google.protobuf import duration_pb2 # type: ignore +from google.protobuf import timestamp_pb2 # type: ignore +from google.rpc import status_pb2 # type: ignore +import google.auth + + +def client_cert_source_callback(): + return b"cert bytes", b"key bytes" + + +# If default endpoint is localhost, then default mtls endpoint will be the same. +# This method modifies the default endpoint so the client can produce a different +# mtls endpoint for endpoint testing purposes. 
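+# For example (illustrative only): a DEFAULT_ENDPOINT containing "localhost" is swapped
+# for "foo.googleapis.com", while the real "transcoder.googleapis.com" default is
+# returned unchanged, so the derived mtls endpoint differs from the default endpoint
+# during these tests.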
+def modify_default_endpoint(client): + return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT + + +def test__get_default_mtls_endpoint(): + api_endpoint = "example.googleapis.com" + api_mtls_endpoint = "example.mtls.googleapis.com" + sandbox_endpoint = "example.sandbox.googleapis.com" + sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" + non_googleapi = "api.example.com" + + assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None + assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint + assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TranscoderServiceClient, "grpc"), + (TranscoderServiceAsyncClient, "grpc_asyncio"), +]) +def test_transcoder_service_client_from_service_account_info(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: + factory.return_value = creds + info = {"valid": True} + client = client_class.from_service_account_info(info, transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'transcoder.googleapis.com:443' + ) + + +@pytest.mark.parametrize("transport_class,transport_name", [ + (transports.TranscoderServiceGrpcTransport, "grpc"), + (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=True) + use_jwt.assert_called_once_with(True) + + with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: + creds = service_account.Credentials(None, None, None) + transport = transport_class(credentials=creds, always_use_jwt_access=False) + use_jwt.assert_not_called() + + +@pytest.mark.parametrize("client_class,transport_name", [ + (TranscoderServiceClient, "grpc"), + (TranscoderServiceAsyncClient, "grpc_asyncio"), +]) +def test_transcoder_service_client_from_service_account_file(client_class, transport_name): + creds = ga_credentials.AnonymousCredentials() + with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: + factory.return_value = creds + client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) + assert client.transport._credentials == creds + assert isinstance(client, client_class) + + assert client.transport._host == ( + 'transcoder.googleapis.com:443' + ) + + +def test_transcoder_service_client_get_transport_class(): + transport = 
TranscoderServiceClient.get_transport_class() + available_transports = [ + transports.TranscoderServiceGrpcTransport, + ] + assert transport in available_transports + + transport = TranscoderServiceClient.get_transport_class("grpc") + assert transport == transports.TranscoderServiceGrpcTransport + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): + # Check that if channel is provided we won't create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials() + ) + client = client_class(transport=transport) + gtc.assert_not_called() + + # Check that if channel is provided via str we will create a new one. + with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: + client = client_class(transport=transport_name) + gtc.assert_called() + + # Check the case api_endpoint is provided. + options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name, client_options=options) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is + # "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_MTLS_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has + # unsupported value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): + with pytest.raises(MutualTLSChannelError): + client = client_class(transport=transport_name) + + # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): + with pytest.raises(ValueError): + client = client_class(transport=transport_name) + + # Check the case quota_project_id is provided + options = client_options.ClientOptions(quota_project_id="octopus") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id="octopus", + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + # Check the case api_endpoint is provided + options = client_options.ClientOptions(api_audience="https://language.googleapis.com") + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience="https://language.googleapis.com" + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) +def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): + # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default + # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. + + # Check the case client_cert_source is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + + if use_client_cert_env == "false": + expected_client_cert_source = None + expected_host = client.DEFAULT_ENDPOINT + else: + expected_client_cert_source = client_cert_source_callback + expected_host = client.DEFAULT_MTLS_ENDPOINT + + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case ADC client cert is provided. Whether client cert is used depends on + # GOOGLE_API_USE_CLIENT_CERTIFICATE value. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): + if use_client_cert_env == "false": + expected_host = client.DEFAULT_ENDPOINT + expected_client_cert_source = None + else: + expected_host = client.DEFAULT_MTLS_ENDPOINT + expected_client_cert_source = client_cert_source_callback + + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=expected_host, + scopes=None, + client_cert_source_for_mtls=expected_client_cert_source, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # Check the case client_cert_source and ADC client cert are not provided. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): + with mock.patch.object(transport_class, '__init__') as patched: + with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): + patched.return_value = None + client = client_class(transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class", [ + TranscoderServiceClient, TranscoderServiceAsyncClient +]) +@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) +@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) +def test_transcoder_service_client_get_mtls_endpoint_and_cert_source(client_class): + mock_client_cert_source = mock.Mock() + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source == mock_client_cert_source + + # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): + mock_client_cert_source = mock.Mock() + mock_api_endpoint = "foo" + options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) + assert api_endpoint == mock_api_endpoint + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_ENDPOINT + assert cert_source is None + + # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. + with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): + with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): + with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): + api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() + assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT + assert cert_source == mock_client_cert_source + + +@pytest.mark.parametrize("client_class,transport_class,transport_name", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), +]) +def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): + # Check the case scopes are provided. 
+ options = client_options.ClientOptions( + scopes=["1", "2"], + ) + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=["1", "2"], + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", grpc_helpers), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + +def test_transcoder_service_client_client_options_from_dict(): + with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: + grpc_transport.return_value = None + client = TranscoderServiceClient( + client_options={'api_endpoint': 'squid.clam.whelk'} + ) + grpc_transport.assert_called_once_with( + credentials=None, + credentials_file=None, + host="squid.clam.whelk", + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + +@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", grpc_helpers), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), +]) +def test_transcoder_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): + # Check the case credentials file is provided. + options = client_options.ClientOptions( + credentials_file="credentials.json" + ) + + with mock.patch.object(transport_class, '__init__') as patched: + patched.return_value = None + client = client_class(client_options=options, transport=transport_name) + patched.assert_called_once_with( + credentials=None, + credentials_file="credentials.json", + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) + + # test that the credentials from file are saved and used as the credentials. 
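+ # The credentials loaded from "credentials.json" (not the ADC credentials)
+ # should be the ones handed to grpc_helpers.create_channel.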
+ with mock.patch.object( + google.auth, "load_credentials_from_file", autospec=True + ) as load_creds, mock.patch.object( + google.auth, "default", autospec=True + ) as adc, mock.patch.object( + grpc_helpers, "create_channel" + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + file_creds = ga_credentials.AnonymousCredentials() + load_creds.return_value = (file_creds, None) + adc.return_value = (creds, None) + client = client_class(client_options=options, transport=transport_name) + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=file_creds, + credentials_file=None, + quota_project_id=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=None, + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("request_type", [ + services.CreateJobRequest, + dict, +]) +def test_create_job(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +def test_create_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + client.create_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + +@pytest.mark.asyncio +async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. 
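+ # The async stub must return an awaitable, so the fake Job response is
+ # wrapped in grpc_helpers_async.FakeUnaryUnaryCall.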
+ with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + )) + response = await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_create_job_async_from_dict(): + await test_create_job_async(request_type=dict) + + +def test_create_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = resources.Job() + client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.create_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. 
+ client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name='name_value') + assert arg == mock_val + + +def test_create_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + +@pytest.mark.asyncio +async def test_create_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job( + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job + mock_val = resources.Job(name='name_value') + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.create_job( + services.CreateJobRequest(), + parent='parent_value', + job=resources.Job(name='name_value'), + ) + + +@pytest.mark.parametrize("request_type", [ + services.ListJobsRequest, + dict, +]) +def test_list_jobs(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. 
+ assert isinstance(response, pagers.ListJobsPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_jobs_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + client.list_jobs() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + +@pytest.mark.asyncio +async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobsRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobsAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_jobs_async_from_dict(): + await test_list_jobs_async(request_type=dict) + + +def test_list_jobs_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = services.ListJobsResponse() + client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_jobs_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobsRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + await client.list_jobs(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_jobs_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_jobs_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_jobs_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobsResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_jobs( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_jobs_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_jobs( + services.ListJobsRequest(), + parent='parent_value', + ) + + +def test_list_jobs_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_jobs(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.Job) + for i in results) +def test_list_jobs_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = list(client.list_jobs(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_jobs_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_jobs(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.Job) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_jobs_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_jobs), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
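+ # Each page's raw_page.next_page_token should match the token configured on
+ # the corresponding fake response ('' for the final page).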
+ call.side_effect = ( + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], + next_page_token='abc', + ), + services.ListJobsResponse( + jobs=[], + next_page_token='def', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token='ghi', + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), + RuntimeError, + ) + pages = [] + async for page_ in (await client.list_jobs(request={})).pages: # pragma: no branch + pages.append(page_) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.parametrize("request_type", [ + services.GetJobRequest, + dict, +]) +def test_get_job(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + template_id='template_id_value', + ) + response = client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +def test_get_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + client.get_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + +@pytest.mark.asyncio +async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( + name='name_value', + input_uri='input_uri_value', + output_uri='output_uri_value', + state=resources.Job.ProcessingState.PENDING, + ttl_after_completion_days=2670, + )) + response = await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.Job) + assert response.name == 'name_value' + assert response.input_uri == 'input_uri_value' + assert response.output_uri == 'output_uri_value' + assert response.state == resources.Job.ProcessingState.PENDING + assert response.ttl_after_completion_days == 2670 + + +@pytest.mark.asyncio +async def test_get_job_async_from_dict(): + await test_get_job_async(request_type=dict) + + +def test_get_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = resources.Job() + client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + await client.get_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.get_job( + services.GetJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.Job() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job( + services.GetJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + services.DeleteJobRequest, + dict, +]) +def test_delete_job(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. 
+ with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + client.delete_job() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + +@pytest.mark.asyncio +async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_async_from_dict(): + await test_delete_job_async(request_type=dict) + + +def test_delete_job_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = None + client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. 
+ call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job( + services.DeleteJobRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + services.CreateJobTemplateRequest, + dict, +]) +def test_create_job_template(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate( + name='name_value', + ) + response = client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +def test_create_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. 
+ client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + client.create_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + +@pytest.mark.asyncio +async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( + name='name_value', + )) + response = await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.CreateJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_create_job_template_async_from_dict(): + await test_create_job_template_async(request_type=dict) + + +def test_create_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_create_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.CreateJobTemplateRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.create_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. 
+ _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_create_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name='name_value') + assert arg == mock_val + arg = args[0].job_template_id + mock_val = 'job_template_id_value' + assert arg == mock_val + + +def test_create_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + +@pytest.mark.asyncio +async def test_create_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.create_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.create_job_template( + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + arg = args[0].job_template + mock_val = resources.JobTemplate(name='name_value') + assert arg == mock_val + arg = args[0].job_template_id + mock_val = 'job_template_id_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_create_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
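+ # The ValueError is raised client-side, before any RPC would be attempted.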
+ with pytest.raises(ValueError): + await client.create_job_template( + services.CreateJobTemplateRequest(), + parent='parent_value', + job_template=resources.JobTemplate(name='name_value'), + job_template_id='job_template_id_value', + ) + + +@pytest.mark.parametrize("request_type", [ + services.ListJobTemplatesRequest, + dict, +]) +def test_list_job_templates(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + ) + response = client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, pagers.ListJobTemplatesPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +def test_list_job_templates_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + client.list_job_templates() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + +@pytest.mark.asyncio +async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( + next_page_token='next_page_token_value', + unreachable=['unreachable_value'], + )) + response = await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.ListJobTemplatesRequest() + + # Establish that the response is the type that we expect. 
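+ # The async client returns a ListJobTemplatesAsyncPager wrapping the response.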
+ assert isinstance(response, pagers.ListJobTemplatesAsyncPager) + assert response.next_page_token == 'next_page_token_value' + assert response.unreachable == ['unreachable_value'] + + +@pytest.mark.asyncio +async def test_list_job_templates_async_from_dict(): + await test_list_job_templates_async(request_type=dict) + + +def test_list_job_templates_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = services.ListJobTemplatesResponse() + client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_list_job_templates_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.ListJobTemplatesRequest() + + request.parent = 'parent_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + await client.list_job_templates(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'parent=parent_value', + ) in kw['metadata'] + + +def test_list_job_templates_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + + +def test_list_job_templates_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = services.ListJobTemplatesResponse() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.list_job_templates( + parent='parent_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].parent + mock_val = 'parent_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_list_job_templates_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.list_job_templates( + services.ListJobTemplatesRequest(), + parent='parent_value', + ) + + +def test_list_job_templates_pager(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + + metadata = () + metadata = tuple(metadata) + ( + gapic_v1.routing_header.to_grpc_metadata(( + ('parent', ''), + )), + ) + pager = client.list_job_templates(request={}) + + assert pager._metadata == metadata + + results = list(pager) + assert len(results) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in results) +def test_list_job_templates_pages(transport_name: str = "grpc"): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__') as call: + # Set the response to a series of pages. 
+ call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + pages = list(client.list_job_templates(request={}).pages) + for page_, token in zip(pages, ['abc','def','ghi', '']): + assert page_.raw_page.next_page_token == token + +@pytest.mark.asyncio +async def test_list_job_templates_async_pager(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. + call.side_effect = ( + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + resources.JobTemplate(), + ], + next_page_token='abc', + ), + services.ListJobTemplatesResponse( + job_templates=[], + next_page_token='def', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token='ghi', + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], + ), + RuntimeError, + ) + async_pager = await client.list_job_templates(request={},) + assert async_pager.next_page_token == 'abc' + responses = [] + async for response in async_pager: # pragma: no branch + responses.append(response) + + assert len(responses) == 6 + assert all(isinstance(i, resources.JobTemplate) + for i in responses) + + +@pytest.mark.asyncio +async def test_list_job_templates_async_pages(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials, + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.list_job_templates), + '__call__', new_callable=mock.AsyncMock) as call: + # Set the response to a series of pages. 
+        call.side_effect = (
+            services.ListJobTemplatesResponse(
+                job_templates=[
+                    resources.JobTemplate(),
+                    resources.JobTemplate(),
+                    resources.JobTemplate(),
+                ],
+                next_page_token='abc',
+            ),
+            services.ListJobTemplatesResponse(
+                job_templates=[],
+                next_page_token='def',
+            ),
+            services.ListJobTemplatesResponse(
+                job_templates=[
+                    resources.JobTemplate(),
+                ],
+                next_page_token='ghi',
+            ),
+            services.ListJobTemplatesResponse(
+                job_templates=[
+                    resources.JobTemplate(),
+                    resources.JobTemplate(),
+                ],
+            ),
+            RuntimeError,
+        )
+        pages = []
+        async for page_ in (await client.list_job_templates(request={})).pages: # pragma: no branch
+            pages.append(page_)
+        for page_, token in zip(pages, ['abc','def','ghi', '']):
+            assert page_.raw_page.next_page_token == token
+
+@pytest.mark.parametrize("request_type", [
+  services.GetJobTemplateRequest,
+  dict,
+])
+def test_get_job_template(request_type, transport: str = 'grpc'):
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = resources.JobTemplate(
+            name='name_value',
+        )
+        response = client.get_job_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+        assert len(call.mock_calls) == 1
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == services.GetJobTemplateRequest()
+
+    # Establish that the response is the type that we expect.
+    assert isinstance(response, resources.JobTemplate)
+    assert response.name == 'name_value'
+
+
+def test_get_job_template_empty_call():
+    # This test is a coverage failsafe to make sure that totally empty calls,
+    # i.e. request == None and no flattened fields passed, work.
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport='grpc',
+    )
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_template),
+            '__call__') as call:
+        client.get_job_template()
+        call.assert_called()
+        _, args, _ = call.mock_calls[0]
+        assert args[0] == services.GetJobTemplateRequest()
+
+@pytest.mark.asyncio
+async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest):
+    client = TranscoderServiceAsyncClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        transport=transport,
+    )
+
+    # Everything is optional in proto3 as far as the runtime is concerned,
+    # and we are mocking out the actual API, so just send an empty request.
+    request = request_type()
+
+    # Mock the actual call within the gRPC stub, and fake the request.
+    with mock.patch.object(
+            type(client.transport.get_job_template),
+            '__call__') as call:
+        # Designate an appropriate return value for the call.
+        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate(
+            name='name_value',
+        ))
+        response = await client.get_job_template(request)
+
+        # Establish that the underlying gRPC stub method was called.
+ assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.GetJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert isinstance(response, resources.JobTemplate) + assert response.name == 'name_value' + + +@pytest.mark.asyncio +async def test_get_job_template_async_from_dict(): + await test_get_job_template_async(request_type=dict) + + +def test_get_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = resources.JobTemplate() + client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_get_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.GetJobTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + await client.get_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_get_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.get_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_get_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. 
+ with pytest.raises(ValueError): + client.get_job_template( + services.GetJobTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_get_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.get_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = resources.JobTemplate() + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.get_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_get_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.get_job_template( + services.GetJobTemplateRequest(), + name='name_value', + ) + + +@pytest.mark.parametrize("request_type", [ + services.DeleteJobTemplateRequest, + dict, +]) +def test_delete_job_template(request_type, transport: str = 'grpc'): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. + request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + response = client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +def test_delete_job_template_empty_call(): + # This test is a coverage failsafe to make sure that totally empty calls, + # i.e. request == None and no flattened fields passed, work. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport='grpc', + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + client.delete_job_template() + call.assert_called() + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + +@pytest.mark.asyncio +async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # Everything is optional in proto3 as far as the runtime is concerned, + # and we are mocking out the actual API, so just send an empty request. 
+ request = request_type() + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + response = await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == services.DeleteJobTemplateRequest() + + # Establish that the response is the type that we expect. + assert response is None + + +@pytest.mark.asyncio +async def test_delete_job_template_async_from_dict(): + await test_delete_job_template_async(request_type=dict) + + +def test_delete_job_template_field_headers(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = None + client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +@pytest.mark.asyncio +async def test_delete_job_template_field_headers_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Any value that is part of the HTTP/1.1 URI should be sent as + # a field header. Set these to a non-empty value. + request = services.DeleteJobTemplateRequest() + + request.name = 'name_value' + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + await client.delete_job_template(request) + + # Establish that the underlying gRPC stub method was called. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + assert args[0] == request + + # Establish that the field header was sent. + _, _, kw = call.mock_calls[0] + assert ( + 'x-goog-request-params', + 'name=name_value', + ) in kw['metadata'] + + +def test_delete_job_template_flattened(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + client.delete_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. 
+ assert len(call.mock_calls) == 1 + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + + +def test_delete_job_template_flattened_error(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + client.delete_job_template( + services.DeleteJobTemplateRequest(), + name='name_value', + ) + +@pytest.mark.asyncio +async def test_delete_job_template_flattened_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Mock the actual call within the gRPC stub, and fake the request. + with mock.patch.object( + type(client.transport.delete_job_template), + '__call__') as call: + # Designate an appropriate return value for the call. + call.return_value = None + + call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) + # Call the method with a truthy value for each flattened field, + # using the keyword arguments to the method. + response = await client.delete_job_template( + name='name_value', + ) + + # Establish that the underlying call was made with the expected + # request object values. + assert len(call.mock_calls) + _, args, _ = call.mock_calls[0] + arg = args[0].name + mock_val = 'name_value' + assert arg == mock_val + +@pytest.mark.asyncio +async def test_delete_job_template_flattened_error_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Attempting to call a method with both a request object and flattened + # fields is an error. + with pytest.raises(ValueError): + await client.delete_job_template( + services.DeleteJobTemplateRequest(), + name='name_value', + ) + + +def test_credentials_transport_error(): + # It is an error to provide credentials and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, + ) + + # It is an error to provide a credentials file and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"credentials_file": "credentials.json"}, + transport=transport, + ) + + # It is an error to provide an api_key and a transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + options = client_options.ClientOptions() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options=options, + transport=transport, + ) + + # It is an error to provide an api_key and a credential. + options = mock.Mock() + options.api_key = "api_key" + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options=options, + credentials=ga_credentials.AnonymousCredentials() + ) + + # It is an error to provide scopes and a transport instance. 
+ transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + with pytest.raises(ValueError): + client = TranscoderServiceClient( + client_options={"scopes": ["1", "2"]}, + transport=transport, + ) + + +def test_transport_instance(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + client = TranscoderServiceClient(transport=transport) + assert client.transport is transport + +def test_transport_get_channel(): + # A client may be instantiated with a custom transport instance. + transport = transports.TranscoderServiceGrpcTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + + transport = transports.TranscoderServiceGrpcAsyncIOTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + channel = transport.grpc_channel + assert channel + +@pytest.mark.parametrize("transport_class", [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, +]) +def test_transport_adc(transport_class): + # Test default credentials are used if not provided. + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class() + adc.assert_called_once() + +@pytest.mark.parametrize("transport_name", [ + "grpc", +]) +def test_transport_kind(transport_name): + transport = TranscoderServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + +def test_transport_grpc_default(): + # A client should use the gRPC transport by default. + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TranscoderServiceGrpcTransport, + ) + +def test_transcoder_service_base_transport_error(): + # Passing both a credentials object and credentials_file should raise an error + with pytest.raises(core_exceptions.DuplicateCredentialArgs): + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + credentials_file="credentials.json" + ) + + +def test_transcoder_service_base_transport(): + # Instantiate the base transport. + with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: + Transport.return_value = None + transport = transports.TranscoderServiceTransport( + credentials=ga_credentials.AnonymousCredentials(), + ) + + # Every method on the transport should just blindly + # raise NotImplementedError. 
+ methods = ( + 'create_job', + 'list_jobs', + 'get_job', + 'delete_job', + 'create_job_template', + 'list_job_templates', + 'get_job_template', + 'delete_job_template', + ) + for method in methods: + with pytest.raises(NotImplementedError): + getattr(transport, method)(request=object()) + + with pytest.raises(NotImplementedError): + transport.close() + + # Catch all for all remaining methods and properties + remainder = [ + 'kind', + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + + +def test_transcoder_service_base_transport_with_credentials_file(): + # Instantiate the base transport with a credentials file + with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport( + credentials_file="credentials.json", + quota_project_id="octopus", + ) + load_creds.assert_called_once_with("credentials.json", + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id="octopus", + ) + + +def test_transcoder_service_base_transport_with_adc(): + # Test the default credentials are used if credentials and credentials_file are None. + with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: + Transport.return_value = None + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport = transports.TranscoderServiceTransport() + adc.assert_called_once() + + +def test_transcoder_service_auth_adc(): + # If no credentials are provided, we should use ADC credentials. + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + TranscoderServiceClient() + adc.assert_called_once_with( + scopes=None, + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + quota_project_id=None, + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +def test_transcoder_service_transport_auth_adc(transport_class): + # If credentials and host are not provided, the transport class should use + # ADC credentials. 
+ with mock.patch.object(google.auth, 'default', autospec=True) as adc: + adc.return_value = (ga_credentials.AnonymousCredentials(), None) + transport_class(quota_project_id="octopus", scopes=["1", "2"]) + adc.assert_called_once_with( + scopes=["1", "2"], + default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), + quota_project_id="octopus", + ) + + +@pytest.mark.parametrize( + "transport_class", + [ + transports.TranscoderServiceGrpcTransport, + transports.TranscoderServiceGrpcAsyncIOTransport, + ], +) +def test_transcoder_service_transport_auth_gdch_credentials(transport_class): + host = 'https://language.com' + api_audience_tests = [None, 'https://language2.com'] + api_audience_expect = [host, 'https://language2.com'] + for t, e in zip(api_audience_tests, api_audience_expect): + with mock.patch.object(google.auth, 'default', autospec=True) as adc: + gdch_mock = mock.MagicMock() + type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) + adc.return_value = (gdch_mock, None) + transport_class(host=host, api_audience=t) + gdch_mock.with_gdch_audience.assert_called_once_with( + e + ) + + +@pytest.mark.parametrize( + "transport_class,grpc_helpers", + [ + (transports.TranscoderServiceGrpcTransport, grpc_helpers), + (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) + ], +) +def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): + # If credentials and host are not provided, the transport class should use + # ADC credentials. + with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( + grpc_helpers, "create_channel", autospec=True + ) as create_channel: + creds = ga_credentials.AnonymousCredentials() + adc.return_value = (creds, None) + transport_class( + quota_project_id="octopus", + scopes=["1", "2"] + ) + + create_channel.assert_called_with( + "transcoder.googleapis.com:443", + credentials=creds, + credentials_file=None, + quota_project_id="octopus", + default_scopes=( + 'https://www.googleapis.com/auth/cloud-platform', +), + scopes=["1", "2"], + default_host="transcoder.googleapis.com", + ssl_credentials=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( + transport_class +): + cred = ga_credentials.AnonymousCredentials() + + # Check ssl_channel_credentials is used if provided. + with mock.patch.object(transport_class, "create_channel") as mock_create_channel: + mock_ssl_channel_creds = mock.Mock() + transport_class( + host="squid.clam.whelk", + credentials=cred, + ssl_channel_credentials=mock_ssl_channel_creds + ) + mock_create_channel.assert_called_once_with( + "squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_channel_creds, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + + # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls + # is used. 
+    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
+        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
+            transport_class(
+                credentials=cred,
+                client_cert_source_for_mtls=client_cert_source_callback
+            )
+            expected_cert, expected_key = client_cert_source_callback()
+            mock_ssl_cred.assert_called_once_with(
+                certificate_chain=expected_cert,
+                private_key=expected_key
+            )
+
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_transcoder_service_host_no_port(transport_name):
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'transcoder.googleapis.com:443'
+    )
+
+@pytest.mark.parametrize("transport_name", [
+    "grpc",
+    "grpc_asyncio",
+])
+def test_transcoder_service_host_with_port(transport_name):
+    client = TranscoderServiceClient(
+        credentials=ga_credentials.AnonymousCredentials(),
+        client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'),
+        transport=transport_name,
+    )
+    assert client.transport._host == (
+        'transcoder.googleapis.com:8000'
+    )
+
+def test_transcoder_service_grpc_transport_channel():
+    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TranscoderServiceGrpcTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+def test_transcoder_service_grpc_asyncio_transport_channel():
+    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
+
+    # Check that channel is used if provided.
+    transport = transports.TranscoderServiceGrpcAsyncIOTransport(
+        host="squid.clam.whelk",
+        channel=channel,
+    )
+    assert transport.grpc_channel == channel
+    assert transport._host == "squid.clam.whelk:443"
+    assert transport._ssl_channel_credentials is None
+
+
+# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
+# removed from grpc/grpc_asyncio transport constructor.
+@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_client_cert_source( + transport_class +): + with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_ssl_cred = mock.Mock() + grpc_ssl_channel_cred.return_value = mock_ssl_cred + + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + + cred = ga_credentials.AnonymousCredentials() + with pytest.warns(DeprecationWarning): + with mock.patch.object(google.auth, 'default') as adc: + adc.return_value = (cred, None) + transport = transport_class( + host="squid.clam.whelk", + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=client_cert_source_callback, + ) + adc.assert_called_once() + + grpc_ssl_channel_cred.assert_called_once_with( + certificate_chain=b"cert bytes", private_key=b"key bytes" + ) + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + assert transport._ssl_channel_credentials == mock_ssl_cred + + +# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are +# removed from grpc/grpc_asyncio transport constructor. +@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) +def test_transcoder_service_transport_channel_mtls_with_adc( + transport_class +): + mock_ssl_cred = mock.Mock() + with mock.patch.multiple( + "google.auth.transport.grpc.SslCredentials", + __init__=mock.Mock(return_value=None), + ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), + ): + with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: + mock_grpc_channel = mock.Mock() + grpc_create_channel.return_value = mock_grpc_channel + mock_cred = mock.Mock() + + with pytest.warns(DeprecationWarning): + transport = transport_class( + host="squid.clam.whelk", + credentials=mock_cred, + api_mtls_endpoint="mtls.squid.clam.whelk", + client_cert_source=None, + ) + + grpc_create_channel.assert_called_once_with( + "mtls.squid.clam.whelk:443", + credentials=mock_cred, + credentials_file=None, + scopes=None, + ssl_credentials=mock_ssl_cred, + quota_project_id=None, + options=[ + ("grpc.max_send_message_length", -1), + ("grpc.max_receive_message_length", -1), + ], + ) + assert transport.grpc_channel == mock_grpc_channel + + +def test_job_path(): + project = "squid" + location = "clam" + job = "whelk" + expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) + actual = TranscoderServiceClient.job_path(project, location, job) + assert expected == actual + + +def test_parse_job_path(): + expected = { + "project": "octopus", + "location": "oyster", + "job": "nudibranch", + } + path = TranscoderServiceClient.job_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_job_path(path) + assert expected == actual + +def test_job_template_path(): + project = "cuttlefish" + location = "mussel" + job_template = "winkle" + expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) + actual = TranscoderServiceClient.job_template_path(project, location, job_template) + assert expected == actual + + +def test_parse_job_template_path(): + expected = { + "project": "nautilus", + "location": "scallop", + "job_template": "abalone", + } + path = TranscoderServiceClient.job_template_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_job_template_path(path) + assert expected == actual + +def test_common_billing_account_path(): + billing_account = "squid" + expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) + actual = TranscoderServiceClient.common_billing_account_path(billing_account) + assert expected == actual + + +def test_parse_common_billing_account_path(): + expected = { + "billing_account": "clam", + } + path = TranscoderServiceClient.common_billing_account_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_billing_account_path(path) + assert expected == actual + +def test_common_folder_path(): + folder = "whelk" + expected = "folders/{folder}".format(folder=folder, ) + actual = TranscoderServiceClient.common_folder_path(folder) + assert expected == actual + + +def test_parse_common_folder_path(): + expected = { + "folder": "octopus", + } + path = TranscoderServiceClient.common_folder_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_folder_path(path) + assert expected == actual + +def test_common_organization_path(): + organization = "oyster" + expected = "organizations/{organization}".format(organization=organization, ) + actual = TranscoderServiceClient.common_organization_path(organization) + assert expected == actual + + +def test_parse_common_organization_path(): + expected = { + "organization": "nudibranch", + } + path = TranscoderServiceClient.common_organization_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_organization_path(path) + assert expected == actual + +def test_common_project_path(): + project = "cuttlefish" + expected = "projects/{project}".format(project=project, ) + actual = TranscoderServiceClient.common_project_path(project) + assert expected == actual + + +def test_parse_common_project_path(): + expected = { + "project": "mussel", + } + path = TranscoderServiceClient.common_project_path(**expected) + + # Check that the path construction is reversible. + actual = TranscoderServiceClient.parse_common_project_path(path) + assert expected == actual + +def test_common_location_path(): + project = "winkle" + location = "nautilus" + expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) + actual = TranscoderServiceClient.common_location_path(project, location) + assert expected == actual + + +def test_parse_common_location_path(): + expected = { + "project": "scallop", + "location": "abalone", + } + path = TranscoderServiceClient.common_location_path(**expected) + + # Check that the path construction is reversible. 
+ actual = TranscoderServiceClient.parse_common_location_path(path) + assert expected == actual + + +def test_client_with_default_client_info(): + client_info = gapic_v1.client_info.ClientInfo() + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + + with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: + transport_class = TranscoderServiceClient.get_transport_class() + transport = transport_class( + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, + ) + prep.assert_called_once_with(client_info) + +@pytest.mark.asyncio +async def test_transport_close_async(): + client = TranscoderServiceAsyncClient( + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", + ) + with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: + async with client: + close.assert_not_called() + close.assert_called_once() + + +def test_transport_close(): + transports = { + "grpc": "_grpc_channel", + } + + for transport, close_name in transports.items(): + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: + with client: + close.assert_not_called() + close.assert_called_once() + +def test_client_ctx(): + transports = [ + 'grpc', + ] + for transport in transports: + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + transport=transport + ) + # Test client calls underlying transport. 
+ with mock.patch.object(type(client.transport), "close") as close: + close.assert_not_called() + with client: + pass + close.assert_called() + +@pytest.mark.parametrize("client_class,transport_class", [ + (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport), + (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport), +]) +def test_api_key_credentials(client_class, transport_class): + with mock.patch.object( + google.auth._default, "get_api_key_credentials", create=True + ) as get_api_key_credentials: + mock_cred = mock.Mock() + get_api_key_credentials.return_value = mock_cred + options = client_options.ClientOptions() + options.api_key = "api_key" + with mock.patch.object(transport_class, "__init__") as patched: + patched.return_value = None + client = client_class(client_options=options) + patched.assert_called_once_with( + credentials=mock_cred, + credentials_file=None, + host=client.DEFAULT_ENDPOINT, + scopes=None, + client_cert_source_for_mtls=None, + quota_project_id=None, + client_info=transports.base.DEFAULT_CLIENT_INFO, + always_use_jwt_access=True, + api_audience=None, + ) From 01e25c1caecccaa760b53665495b492e131e18d2 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Mon, 9 Jan 2023 22:57:47 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot=20po?= =?UTF-8?q?st-processor?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- .../transcoder_service/async_client.py | 2 +- .../services/transcoder_service/client.py | 2 +- owl-bot-staging/v1/.coveragerc | 12 - owl-bot-staging/v1/.flake8 | 33 - owl-bot-staging/v1/MANIFEST.in | 2 - owl-bot-staging/v1/README.rst | 49 - owl-bot-staging/v1/docs/conf.py | 376 -- owl-bot-staging/v1/docs/index.rst | 7 - .../v1/docs/transcoder_v1/services.rst | 6 - .../docs/transcoder_v1/transcoder_service.rst | 10 - .../v1/docs/transcoder_v1/types.rst | 6 - .../google/cloud/video/transcoder/__init__.py | 83 - .../cloud/video/transcoder/gapic_version.py | 16 - .../v1/google/cloud/video/transcoder/py.typed | 2 - .../cloud/video/transcoder_v1/__init__.py | 84 - .../video/transcoder_v1/gapic_metadata.json | 103 - .../video/transcoder_v1/gapic_version.py | 16 - .../google/cloud/video/transcoder_v1/py.typed | 2 - .../video/transcoder_v1/services/__init__.py | 15 - .../services/transcoder_service/__init__.py | 22 - .../transcoder_service/async_client.py | 1054 ----- .../services/transcoder_service/client.py | 1265 ------ .../services/transcoder_service/pagers.py | 261 -- .../transcoder_service/transports/__init__.py | 33 - .../transcoder_service/transports/base.py | 248 -- .../transcoder_service/transports/grpc.py | 454 --- .../transports/grpc_asyncio.py | 453 --- .../video/transcoder_v1/types/__init__.py | 78 - .../video/transcoder_v1/types/resources.py | 2172 ---------- .../video/transcoder_v1/types/services.py | 325 -- owl-bot-staging/v1/mypy.ini | 3 - owl-bot-staging/v1/noxfile.py | 184 - ...data_google.cloud.video.transcoder.v1.json | 1315 ------- ...ted_transcoder_service_create_job_async.py | 56 - ...ated_transcoder_service_create_job_sync.py | 56 - ...coder_service_create_job_template_async.py | 53 - ...scoder_service_create_job_template_sync.py | 53 - ...ted_transcoder_service_delete_job_async.py | 50 - ...ated_transcoder_service_delete_job_sync.py | 50 - ...coder_service_delete_job_template_async.py | 50 - ...scoder_service_delete_job_template_sync.py | 50 - 
...erated_transcoder_service_get_job_async.py | 52 - ...nerated_transcoder_service_get_job_sync.py | 52 - ...anscoder_service_get_job_template_async.py | 52 - ...ranscoder_service_get_job_template_sync.py | 52 - ...scoder_service_list_job_templates_async.py | 53 - ...nscoder_service_list_job_templates_sync.py | 53 - ...ated_transcoder_service_list_jobs_async.py | 53 - ...rated_transcoder_service_list_jobs_sync.py | 53 - .../scripts/fixup_transcoder_v1_keywords.py | 183 - owl-bot-staging/v1/setup.py | 92 - .../v1/testing/constraints-3.10.txt | 6 - .../v1/testing/constraints-3.11.txt | 6 - .../v1/testing/constraints-3.7.txt | 9 - .../v1/testing/constraints-3.8.txt | 6 - .../v1/testing/constraints-3.9.txt | 6 - owl-bot-staging/v1/tests/__init__.py | 16 - owl-bot-staging/v1/tests/unit/__init__.py | 16 - .../v1/tests/unit/gapic/__init__.py | 16 - .../unit/gapic/transcoder_v1/__init__.py | 16 - .../transcoder_v1/test_transcoder_service.py | 3484 ----------------- ...data_google.cloud.video.transcoder.v1.json | 2 +- setup.py | 2 + .../testing => testing}/constraints-3.12.txt | 0 64 files changed, 5 insertions(+), 13386 deletions(-) delete mode 100644 owl-bot-staging/v1/.coveragerc delete mode 100644 owl-bot-staging/v1/.flake8 delete mode 100644 owl-bot-staging/v1/MANIFEST.in delete mode 100644 owl-bot-staging/v1/README.rst delete mode 100644 owl-bot-staging/v1/docs/conf.py delete mode 100644 owl-bot-staging/v1/docs/index.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/services.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst delete mode 100644 owl-bot-staging/v1/docs/transcoder_v1/types.rst delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py delete mode 100644 owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py delete mode 100644 
owl-bot-staging/v1/mypy.ini delete mode 100644 owl-bot-staging/v1/noxfile.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py delete mode 100644 owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py delete mode 100644 owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py delete mode 100644 owl-bot-staging/v1/setup.py delete mode 100644 owl-bot-staging/v1/testing/constraints-3.10.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.11.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.7.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.8.txt delete mode 100644 owl-bot-staging/v1/testing/constraints-3.9.txt delete mode 100644 owl-bot-staging/v1/tests/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py delete mode 100644 owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py rename {owl-bot-staging/v1/testing => testing}/constraints-3.12.txt (100%) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index 857ca88..2082bb0 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -148,7 +148,7 @@ 
def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index 6780296..67c5bd5 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -322,7 +322,7 @@ def get_mtls_endpoint_and_cert_source( The API endpoint is determined in the following order: (1) if `client_options.api_endpoint` if provided, use the provided one. (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variabel is "never", use the default API + default mTLS endpoint; if the environment variable is "never", use the default API endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise use the default API endpoint. diff --git a/owl-bot-staging/v1/.coveragerc b/owl-bot-staging/v1/.coveragerc deleted file mode 100644 index 33d10d7..0000000 --- a/owl-bot-staging/v1/.coveragerc +++ /dev/null @@ -1,12 +0,0 @@ -[run] -branch = True - -[report] -show_missing = True -omit = - google/cloud/video/transcoder/__init__.py -exclude_lines = - # Re-enable the standard pragma - pragma: NO COVER - # Ignore debug-only repr - def __repr__ diff --git a/owl-bot-staging/v1/.flake8 b/owl-bot-staging/v1/.flake8 deleted file mode 100644 index 29227d4..0000000 --- a/owl-bot-staging/v1/.flake8 +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Copyright 2020 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# https://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. - -# Generated by synthtool. DO NOT EDIT! -[flake8] -ignore = E203, E266, E501, W503 -exclude = - # Exclude generated code. - **/proto/** - **/gapic/** - **/services/** - **/types/** - *_pb2.py - - # Standard linting exemptions. 
- **/.nox/** - __pycache__, - .git, - *.pyc, - conf.py diff --git a/owl-bot-staging/v1/MANIFEST.in b/owl-bot-staging/v1/MANIFEST.in deleted file mode 100644 index da1cb61..0000000 --- a/owl-bot-staging/v1/MANIFEST.in +++ /dev/null @@ -1,2 +0,0 @@ -recursive-include google/cloud/video/transcoder *.py -recursive-include google/cloud/video/transcoder_v1 *.py diff --git a/owl-bot-staging/v1/README.rst b/owl-bot-staging/v1/README.rst deleted file mode 100644 index 43621a1..0000000 --- a/owl-bot-staging/v1/README.rst +++ /dev/null @@ -1,49 +0,0 @@ -Python Client for Google Cloud Video Transcoder API -================================================= - -Quick Start ------------ - -In order to use this library, you first need to go through the following steps: - -1. `Select or create a Cloud Platform project.`_ -2. `Enable billing for your project.`_ -3. Enable the Google Cloud Video Transcoder API. -4. `Setup Authentication.`_ - -.. _Select or create a Cloud Platform project.: https://console.cloud.google.com/project -.. _Enable billing for your project.: https://cloud.google.com/billing/docs/how-to/modify-project#enable_billing_for_a_project -.. _Setup Authentication.: https://googleapis.dev/python/google-api-core/latest/auth.html - -Installation -~~~~~~~~~~~~ - -Install this library in a `virtualenv`_ using pip. `virtualenv`_ is a tool to -create isolated Python environments. The basic problem it addresses is one of -dependencies and versions, and indirectly permissions. - -With `virtualenv`_, it's possible to install this library without needing system -install permissions, and without clashing with the installed system -dependencies. - -.. _`virtualenv`: https://virtualenv.pypa.io/en/latest/ - - -Mac/Linux -^^^^^^^^^ - -.. code-block:: console - - python3 -m venv - source /bin/activate - /bin/pip install /path/to/library - - -Windows -^^^^^^^ - -.. code-block:: console - - python3 -m venv - \Scripts\activate - \Scripts\pip.exe install \path\to\library diff --git a/owl-bot-staging/v1/docs/conf.py b/owl-bot-staging/v1/docs/conf.py deleted file mode 100644 index a906f71..0000000 --- a/owl-bot-staging/v1/docs/conf.py +++ /dev/null @@ -1,376 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# -# google-cloud-video-transcoder documentation build configuration file -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os -import shlex - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. 
-sys.path.insert(0, os.path.abspath("..")) - -__version__ = "0.1.0" - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "4.0.1" - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.autosummary", - "sphinx.ext.intersphinx", - "sphinx.ext.coverage", - "sphinx.ext.napoleon", - "sphinx.ext.todo", - "sphinx.ext.viewcode", -] - -# autodoc/autosummary flags -autoclass_content = "both" -autodoc_default_flags = ["members"] -autosummary_generate = True - - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# Allow markdown includes (so releases.md can include CHANGLEOG.md) -# http://www.sphinx-doc.org/en/master/markdown.html -source_parsers = {".md": "recommonmark.parser.CommonMarkParser"} - -# The suffix(es) of source filenames. -# You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] - -# The encoding of source files. -# source_encoding = 'utf-8-sig' - -# The root toctree document. -root_doc = "index" - -# General information about the project. -project = u"google-cloud-video-transcoder" -copyright = u"2022, Google, LLC" -author = u"Google APIs" # TODO: autogenerate this bit - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The full version, including alpha/beta/rc tags. -release = __version__ -# The short X.Y version. -version = ".".join(release.split(".")[0:2]) - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -# today = '' -# Else, today_fmt is used as the format for a strftime call. -# today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ["_build"] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -# default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -# add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -# add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -# show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# A list of ignored prefixes for module index sorting. -# modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built documents. -# keep_warnings = False - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-html_theme = "alabaster" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -html_theme_options = { - "description": "Google Cloud Video Client Libraries for Python", - "github_user": "googleapis", - "github_repo": "google-cloud-python", - "github_banner": True, - "font_family": "'Roboto', Georgia, sans", - "head_font_family": "'Roboto', Georgia, serif", - "code_font_family": "'Roboto Mono', 'Consolas', monospace", -} - -# Add any paths that contain custom themes here, relative to this directory. -# html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -# html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -# html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -# html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -# html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Add any extra paths that contain custom files (such as robots.txt or -# .htaccess) here, relative to this directory. These files are copied -# directly to the root of the documentation. -# html_extra_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -# html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -# html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -# html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -# html_additional_pages = {} - -# If false, no module index is generated. -# html_domain_indices = True - -# If false, no index is generated. -# html_use_index = True - -# If true, the index is split into individual pages for each letter. -# html_split_index = False - -# If true, links to the reST sources are added to the pages. -# html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -# html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -# html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -# html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -# html_file_suffix = None - -# Language to be used for generating the HTML full-text search index. -# Sphinx supports the following languages: -# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' -# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -# html_search_language = 'en' - -# A dictionary with options for the search language support, empty by default. 
-# Now only 'ja' uses this config value -# html_search_options = {'type': 'default'} - -# The name of a javascript file (relative to the configuration directory) that -# implements a search results scorer. If empty, the default will be used. -# html_search_scorer = 'scorer.js' - -# Output file base name for HTML help builder. -htmlhelp_basename = "google-cloud-video-transcoder-doc" - -# -- Options for warnings ------------------------------------------------------ - - -suppress_warnings = [ - # Temporarily suppress this to avoid "more than one target found for - # cross-reference" warning, which are intractable for us to avoid while in - # a mono-repo. - # See https://github.com/sphinx-doc/sphinx/blob - # /2a65ffeef5c107c19084fabdd706cdff3f52d93c/sphinx/domains/python.py#L843 - "ref.python" -] - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # 'preamble': '', - # Latex figure (float) alignment - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - ( - root_doc, - "google-cloud-video-transcoder.tex", - u"google-cloud-video-transcoder Documentation", - author, - "manual", - ) -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -# latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -# latex_use_parts = False - -# If true, show page references after internal links. -# latex_show_pagerefs = False - -# If true, show URL addresses after external links. -# latex_show_urls = False - -# Documents to append as an appendix to all manuals. -# latex_appendices = [] - -# If false, no module index is generated. -# latex_domain_indices = True - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ( - root_doc, - "google-cloud-video-transcoder", - u"Google Cloud Video Transcoder Documentation", - [author], - 1, - ) -] - -# If true, show URL addresses after external links. -# man_show_urls = False - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - root_doc, - "google-cloud-video-transcoder", - u"google-cloud-video-transcoder Documentation", - author, - "google-cloud-video-transcoder", - "GAPIC library for Google Cloud Video Transcoder API", - "APIs", - ) -] - -# Documents to append as an appendix to all manuals. -# texinfo_appendices = [] - -# If false, no module index is generated. -# texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -# texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -# texinfo_no_detailmenu = False - - -# Example configuration for intersphinx: refer to the Python standard library. 
-intersphinx_mapping = { - "python": ("http://python.readthedocs.org/en/latest/", None), - "gax": ("https://gax-python.readthedocs.org/en/latest/", None), - "google-auth": ("https://google-auth.readthedocs.io/en/stable", None), - "google-gax": ("https://gax-python.readthedocs.io/en/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None), - "grpc": ("https://grpc.io/grpc/python/", None), - "requests": ("http://requests.kennethreitz.org/en/stable/", None), - "proto": ("https://proto-plus-python.readthedocs.io/en/stable", None), - "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), -} - - -# Napoleon settings -napoleon_google_docstring = True -napoleon_numpy_docstring = True -napoleon_include_private_with_doc = False -napoleon_include_special_with_doc = True -napoleon_use_admonition_for_examples = False -napoleon_use_admonition_for_notes = False -napoleon_use_admonition_for_references = False -napoleon_use_ivar = False -napoleon_use_param = True -napoleon_use_rtype = True diff --git a/owl-bot-staging/v1/docs/index.rst b/owl-bot-staging/v1/docs/index.rst deleted file mode 100644 index 0cfe564..0000000 --- a/owl-bot-staging/v1/docs/index.rst +++ /dev/null @@ -1,7 +0,0 @@ -API Reference -------------- -.. toctree:: - :maxdepth: 2 - - transcoder_v1/services - transcoder_v1/types diff --git a/owl-bot-staging/v1/docs/transcoder_v1/services.rst b/owl-bot-staging/v1/docs/transcoder_v1/services.rst deleted file mode 100644 index 1bd129e..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/services.rst +++ /dev/null @@ -1,6 +0,0 @@ -Services for Google Cloud Video Transcoder v1 API -================================================= -.. toctree:: - :maxdepth: 2 - - transcoder_service diff --git a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst b/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst deleted file mode 100644 index 5bf6bd8..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/transcoder_service.rst +++ /dev/null @@ -1,10 +0,0 @@ -TranscoderService ------------------------------------ - -.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service - :members: - :inherited-members: - -.. automodule:: google.cloud.video.transcoder_v1.services.transcoder_service.pagers - :members: - :inherited-members: diff --git a/owl-bot-staging/v1/docs/transcoder_v1/types.rst b/owl-bot-staging/v1/docs/transcoder_v1/types.rst deleted file mode 100644 index 6fe8d2e..0000000 --- a/owl-bot-staging/v1/docs/transcoder_v1/types.rst +++ /dev/null @@ -1,6 +0,0 @@ -Types for Google Cloud Video Transcoder v1 API -============================================== - -.. automodule:: google.cloud.video.transcoder_v1.types - :members: - :show-inheritance: diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py deleted file mode 100644 index 02a7e66..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder/__init__.py +++ /dev/null @@ -1,83 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from google.cloud.video.transcoder import gapic_version as package_version - -__version__ = package_version.__version__ - - -from google.cloud.video.transcoder_v1.services.transcoder_service.client import TranscoderServiceClient -from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import TranscoderServiceAsyncClient - -from google.cloud.video.transcoder_v1.types.resources import AdBreak -from google.cloud.video.transcoder_v1.types.resources import AudioStream -from google.cloud.video.transcoder_v1.types.resources import EditAtom -from google.cloud.video.transcoder_v1.types.resources import ElementaryStream -from google.cloud.video.transcoder_v1.types.resources import Input -from google.cloud.video.transcoder_v1.types.resources import Job -from google.cloud.video.transcoder_v1.types.resources import JobConfig -from google.cloud.video.transcoder_v1.types.resources import JobTemplate -from google.cloud.video.transcoder_v1.types.resources import Manifest -from google.cloud.video.transcoder_v1.types.resources import MuxStream -from google.cloud.video.transcoder_v1.types.resources import Output -from google.cloud.video.transcoder_v1.types.resources import Overlay -from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig -from google.cloud.video.transcoder_v1.types.resources import PubsubDestination -from google.cloud.video.transcoder_v1.types.resources import SegmentSettings -from google.cloud.video.transcoder_v1.types.resources import SpriteSheet -from google.cloud.video.transcoder_v1.types.resources import TextStream -from google.cloud.video.transcoder_v1.types.resources import VideoStream -from google.cloud.video.transcoder_v1.types.services import CreateJobRequest -from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import GetJobRequest -from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsResponse -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse - -__all__ = ('TranscoderServiceClient', - 'TranscoderServiceAsyncClient', - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py b/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google 
LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py deleted file mode 100644 index 95ed57b..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/__init__.py +++ /dev/null @@ -1,84 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
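
Both the thin ``google.cloud.video.transcoder`` package and the versioned ``google.cloud.video.transcoder_v1`` package deleted here re-export the same clients and request/response types, so either import surface can be used. A minimal sketch, with a placeholder resource name:

    from google.cloud.video import transcoder_v1

    # Request types are constructible directly from the package namespace.
    request = transcoder_v1.GetJobRequest(
        name="projects/my-project/locations/us-central1/jobs/my-job",  # placeholder
    )
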
-# -from google.cloud.video.transcoder import gapic_version as package_version - -__version__ = package_version.__version__ - - -from .services.transcoder_service import TranscoderServiceClient -from .services.transcoder_service import TranscoderServiceAsyncClient - -from .types.resources import AdBreak -from .types.resources import AudioStream -from .types.resources import EditAtom -from .types.resources import ElementaryStream -from .types.resources import Input -from .types.resources import Job -from .types.resources import JobConfig -from .types.resources import JobTemplate -from .types.resources import Manifest -from .types.resources import MuxStream -from .types.resources import Output -from .types.resources import Overlay -from .types.resources import PreprocessingConfig -from .types.resources import PubsubDestination -from .types.resources import SegmentSettings -from .types.resources import SpriteSheet -from .types.resources import TextStream -from .types.resources import VideoStream -from .types.services import CreateJobRequest -from .types.services import CreateJobTemplateRequest -from .types.services import DeleteJobRequest -from .types.services import DeleteJobTemplateRequest -from .types.services import GetJobRequest -from .types.services import GetJobTemplateRequest -from .types.services import ListJobsRequest -from .types.services import ListJobsResponse -from .types.services import ListJobTemplatesRequest -from .types.services import ListJobTemplatesResponse - -__all__ = ( - 'TranscoderServiceAsyncClient', -'AdBreak', -'AudioStream', -'CreateJobRequest', -'CreateJobTemplateRequest', -'DeleteJobRequest', -'DeleteJobTemplateRequest', -'EditAtom', -'ElementaryStream', -'GetJobRequest', -'GetJobTemplateRequest', -'Input', -'Job', -'JobConfig', -'JobTemplate', -'ListJobTemplatesRequest', -'ListJobTemplatesResponse', -'ListJobsRequest', -'ListJobsResponse', -'Manifest', -'MuxStream', -'Output', -'Overlay', -'PreprocessingConfig', -'PubsubDestination', -'SegmentSettings', -'SpriteSheet', -'TextStream', -'TranscoderServiceClient', -'VideoStream', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json deleted file mode 100644 index 6651379..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_metadata.json +++ /dev/null @@ -1,103 +0,0 @@ - { - "comment": "This file maps proto services/RPCs to the corresponding library clients/methods", - "language": "python", - "libraryPackage": "google.cloud.video.transcoder_v1", - "protoPackage": "google.cloud.video.transcoder.v1", - "schema": "1.0", - "services": { - "TranscoderService": { - "clients": { - "grpc": { - "libraryClient": "TranscoderServiceClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - "delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - }, - "grpc-async": { - "libraryClient": "TranscoderServiceAsyncClient", - "rpcs": { - "CreateJob": { - "methods": [ - "create_job" - ] - }, - "CreateJobTemplate": { - "methods": [ - "create_job_template" - ] - }, - "DeleteJob": { - "methods": [ - 
"delete_job" - ] - }, - "DeleteJobTemplate": { - "methods": [ - "delete_job_template" - ] - }, - "GetJob": { - "methods": [ - "get_job" - ] - }, - "GetJobTemplate": { - "methods": [ - "get_job_template" - ] - }, - "ListJobTemplates": { - "methods": [ - "list_job_templates" - ] - }, - "ListJobs": { - "methods": [ - "list_jobs" - ] - } - } - } - } - } - } -} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py deleted file mode 100644 index 405b1ce..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/gapic_version.py +++ /dev/null @@ -1,16 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -__version__ = "0.1.0" # {x-release-please-version} diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed deleted file mode 100644 index a2716a6..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/py.typed +++ /dev/null @@ -1,2 +0,0 @@ -# Marker file for PEP 561. -# The google-cloud-video-transcoder package uses inline types. diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py deleted file mode 100644 index e8e1c38..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py deleted file mode 100644 index a27abd8..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py +++ /dev/null @@ -1,22 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# -from .client import TranscoderServiceClient -from .async_client import TranscoderServiceAsyncClient - -__all__ = ( - 'TranscoderServiceClient', - 'TranscoderServiceAsyncClient', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py deleted file mode 100644 index 2bfe811..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ /dev/null @@ -1,1054 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import functools -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union - -from google.cloud.video.transcoder_v1 import gapic_version as package_version - -from google.api_core.client_options import ClientOptions -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport -from .client import TranscoderServiceClient - - -class TranscoderServiceAsyncClient: - """Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. 
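
The generated docstring samples in this client define ``async def`` snippets but leave running them to the caller. A minimal end-to-end sketch, assuming Application Default Credentials are configured and using placeholder project, location, bucket, and template values:

    import asyncio

    from google.cloud.video import transcoder_v1

    async def main():
        client = transcoder_v1.TranscoderServiceAsyncClient()

        job = transcoder_v1.Job()
        job.template_id = "preset/web-hd"              # illustrative template id
        job.input_uri = "gs://my-bucket/input.mp4"     # placeholder Cloud Storage URI
        job.output_uri = "gs://my-bucket/output/"      # placeholder Cloud Storage URI

        request = transcoder_v1.CreateJobRequest(
            parent="projects/my-project/locations/us-central1",  # placeholder
            job=job,
        )
        response = await client.create_job(request=request)
        print(response.name)

    asyncio.run(main())
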
- """ - - _client: TranscoderServiceClient - - DEFAULT_ENDPOINT = TranscoderServiceClient.DEFAULT_ENDPOINT - DEFAULT_MTLS_ENDPOINT = TranscoderServiceClient.DEFAULT_MTLS_ENDPOINT - - job_path = staticmethod(TranscoderServiceClient.job_path) - parse_job_path = staticmethod(TranscoderServiceClient.parse_job_path) - job_template_path = staticmethod(TranscoderServiceClient.job_template_path) - parse_job_template_path = staticmethod(TranscoderServiceClient.parse_job_template_path) - common_billing_account_path = staticmethod(TranscoderServiceClient.common_billing_account_path) - parse_common_billing_account_path = staticmethod(TranscoderServiceClient.parse_common_billing_account_path) - common_folder_path = staticmethod(TranscoderServiceClient.common_folder_path) - parse_common_folder_path = staticmethod(TranscoderServiceClient.parse_common_folder_path) - common_organization_path = staticmethod(TranscoderServiceClient.common_organization_path) - parse_common_organization_path = staticmethod(TranscoderServiceClient.parse_common_organization_path) - common_project_path = staticmethod(TranscoderServiceClient.common_project_path) - parse_common_project_path = staticmethod(TranscoderServiceClient.parse_common_project_path) - common_location_path = staticmethod(TranscoderServiceClient.common_location_path) - parse_common_location_path = staticmethod(TranscoderServiceClient.parse_common_location_path) - - @classmethod - def from_service_account_info(cls, info: dict, *args, **kwargs): - """Creates an instance of this client using the provided credentials - info. - - Args: - info (dict): The service account private key info. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. - """ - return TranscoderServiceClient.from_service_account_info.__func__(TranscoderServiceAsyncClient, info, *args, **kwargs) # type: ignore - - @classmethod - def from_service_account_file(cls, filename: str, *args, **kwargs): - """Creates an instance of this client using the provided credentials - file. - - Args: - filename (str): The path to the service account private key json - file. - args: Additional arguments to pass to the constructor. - kwargs: Additional arguments to pass to the constructor. - - Returns: - TranscoderServiceAsyncClient: The constructed client. - """ - return TranscoderServiceClient.from_service_account_file.__func__(TranscoderServiceAsyncClient, filename, *args, **kwargs) # type: ignore - - from_service_account_json = from_service_account_file - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. - - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. 
- (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - return TranscoderServiceClient.get_mtls_endpoint_and_cert_source(client_options) # type: ignore - - @property - def transport(self) -> TranscoderServiceTransport: - """Returns the transport used by the client instance. - - Returns: - TranscoderServiceTransport: The transport used by the client instance. - """ - return self._client.transport - - get_transport_class = functools.partial(type(TranscoderServiceClient).get_transport_class, type(TranscoderServiceClient)) - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Union[str, TranscoderServiceTransport] = "grpc_asyncio", - client_options: Optional[ClientOptions] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, ~.TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (ClientOptions): Custom options for the client. It - won't take effect if a ``transport`` instance is provided. - (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. 
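
The ``client_options`` hook described above can be exercised directly. A minimal sketch, where the endpoint value is only a placeholder to illustrate the ``api_endpoint`` override, not a documented Transcoder endpoint:

    from google.api_core.client_options import ClientOptions

    from google.cloud.video import transcoder_v1

    options = ClientOptions(api_endpoint="my-regional-endpoint.example.com")  # placeholder endpoint
    client = transcoder_v1.TranscoderServiceAsyncClient(client_options=options)
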
- """ - self._client = TranscoderServiceClient( - credentials=credentials, - transport=transport, - client_options=client_options, - client_info=client_info, - - ) - - async def create_job(self, - request: Optional[Union[services.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[resources.Job] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_create_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - job = transcoder_v1.Job() - job.template_id = "template_id_value" - - request = transcoder_v1.CreateJobRequest( - parent="parent_value", - job=job, - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]]): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (:class:`str`): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (:class:`google.cloud.video.transcoder_v1.types.Job`): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. 
- metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_jobs(self, - request: Optional[Union[services.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsAsyncPager: - r"""Lists jobs in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_list_jobs(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]]): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (:class:`str`): - Required. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobsRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_jobs, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobsAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job(self, - request: Optional[Union[services.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_get_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]]): - The request object. Request message for - `TranscoderService.GetJob`. - name (:class:`str`): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
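
Because ``list_jobs`` on the async client returns a ``ListJobsAsyncPager`` once awaited, callers can iterate with ``async for`` and let the pager fetch additional pages transparently. A minimal sketch with a placeholder parent:

    from google.cloud.video import transcoder_v1

    async def print_job_names(parent: str = "projects/my-project/locations/us-central1"):  # placeholder
        client = transcoder_v1.TranscoderServiceAsyncClient()
        pager = await client.list_jobs(request=transcoder_v1.ListJobsRequest(parent=parent))
        async for job in pager:  # additional pages are resolved automatically
            print(job.name)
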
- return response - - async def delete_job(self, - request: Optional[Union[services.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_delete_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_job(request=request) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]]): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (:class:`str`): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def create_job_template(self, - request: Optional[Union[services.CreateJobTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_template: Optional[resources.JobTemplate] = None, - job_template_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. 
- # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_create_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.CreateJobTemplateRequest( - parent="parent_value", - job_template_id="job_template_id_value", - ) - - # Make the request - response = await client.create_job_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]]): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (:class:`str`): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template (:class:`google.cloud.video.transcoder_v1.types.JobTemplate`): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (:class:`str`): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.CreateJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.create_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def list_job_templates(self, - request: Optional[Union[services.ListJobTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesAsyncPager: - r"""Lists job templates in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_list_job_templates(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]]): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (:class:`str`): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager: - Response message for TranscoderService.ListJobTemplates. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.ListJobTemplatesRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.list_job_templates, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
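
Job templates created through ``create_job_template`` can then be referenced from ``Job.template_id`` when creating jobs. A rough sketch with placeholder names, leaving the template's ``JobConfig`` unspecified:

    from google.cloud.video import transcoder_v1

    async def create_template_then_job(parent: str):
        client = transcoder_v1.TranscoderServiceAsyncClient()

        # A real template would carry a populated JobConfig; it is omitted in this sketch.
        await client.create_job_template(
            parent=parent,
            job_template=transcoder_v1.JobTemplate(),
            job_template_id="my-template",  # placeholder id (4-63 characters, per the docstring above)
        )

        job = transcoder_v1.Job()
        job.template_id = "my-template"  # refers to the template id, not its full resource name
        return await client.create_job(parent=parent, job=job)
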
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__aiter__` convenience method. - response = pagers.ListJobTemplatesAsyncPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def get_job_template(self, - request: Optional[Union[services.GetJobTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_get_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_template(request=request) - - # Handle the response - print(response) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]]): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (:class:`str`): - Required. The name of the job template to retrieve. - Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.GetJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.get_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - async def delete_job_template(self, - request: Optional[Union[services.DeleteJobTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - async def sample_delete_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_template(request=request) - - Args: - request (Optional[Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]]): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (:class:`str`): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError("If the `request` argument is set, then none of " - "the individual field arguments should be set.") - - request = services.DeleteJobTemplateRequest(request) - - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = gapic_v1.method_async.wrap_method( - self._client._transport.delete_job_template, - default_timeout=60.0, - client_info=DEFAULT_CLIENT_INFO, - ) - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- await rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - async def __aenter__(self): - return self - - async def __aexit__(self, exc_type, exc, tb): - await self.transport.close() - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "TranscoderServiceAsyncClient", -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py deleted file mode 100644 index bffe916..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ /dev/null @@ -1,1265 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -import os -import re -from typing import Dict, Mapping, MutableMapping, MutableSequence, Optional, Sequence, Tuple, Type, Union, cast - -from google.cloud.video.transcoder_v1 import gapic_version as package_version - -from google.api_core import client_options as client_options_lib -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport import mtls # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore -from google.oauth2 import service_account # type: ignore - -try: - OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] -except AttributeError: # pragma: NO COVER - OptionalRetry = Union[retries.Retry, object] # type: ignore - -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc import TranscoderServiceGrpcTransport -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -class TranscoderServiceClientMeta(type): - """Metaclass for the TranscoderService client. - - This provides class-level methods for building and retrieving - support objects (e.g. transport) without polluting the client instance - objects. - """ - _transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] - _transport_registry["grpc"] = TranscoderServiceGrpcTransport - _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport - - def get_transport_class(cls, - label: Optional[str] = None, - ) -> Type[TranscoderServiceTransport]: - """Returns an appropriate transport class. - - Args: - label: The name of the desired transport. 
If none is
-                provided, then the first transport in the registry is used.
-
-        Returns:
-            The transport class to use.
-        """
-        # If a specific transport is requested, return that one.
-        if label:
-            return cls._transport_registry[label]
-
-        # No transport is requested; return the default (that is, the first one
-        # in the dictionary).
-        return next(iter(cls._transport_registry.values()))
-
-
-class TranscoderServiceClient(metaclass=TranscoderServiceClientMeta):
-    """Using the Transcoder API, you can queue asynchronous jobs for
-    transcoding media into various output formats. Output formats
-    may include different streaming standards such as HTTP Live
-    Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH).
-    You can also customize jobs using advanced features such as
-    Digital Rights Management (DRM), audio equalization, content
-    concatenation, and digital ad-stitch ready content generation.
-    """
-
-    @staticmethod
-    def _get_default_mtls_endpoint(api_endpoint):
-        """Converts api endpoint to mTLS endpoint.
-
-        Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
-        "*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
-        Args:
-            api_endpoint (Optional[str]): the api endpoint to convert.
-        Returns:
-            str: converted mTLS api endpoint.
-        """
-        if not api_endpoint:
-            return api_endpoint
-
-        mtls_endpoint_re = re.compile(
-            r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
-        )
-
-        m = mtls_endpoint_re.match(api_endpoint)
-        name, mtls, sandbox, googledomain = m.groups()
-        if mtls or not googledomain:
-            return api_endpoint
-
-        if sandbox:
-            return api_endpoint.replace(
-                "sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
-            )
-
-        return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
-
-    DEFAULT_ENDPOINT = "transcoder.googleapis.com"
-    DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__(  # type: ignore
-        DEFAULT_ENDPOINT
-    )
-
-    @classmethod
-    def from_service_account_info(cls, info: dict, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            info.
-
-        Args:
-            info (dict): The service account private key info.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            TranscoderServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_info(info)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    @classmethod
-    def from_service_account_file(cls, filename: str, *args, **kwargs):
-        """Creates an instance of this client using the provided credentials
-            file.
-
-        Args:
-            filename (str): The path to the service account private key json
-                file.
-            args: Additional arguments to pass to the constructor.
-            kwargs: Additional arguments to pass to the constructor.
-
-        Returns:
-            TranscoderServiceClient: The constructed client.
-        """
-        credentials = service_account.Credentials.from_service_account_file(
-            filename)
-        kwargs["credentials"] = credentials
-        return cls(*args, **kwargs)
-
-    from_service_account_json = from_service_account_file
-
-    @property
-    def transport(self) -> TranscoderServiceTransport:
-        """Returns the transport used by the client instance.
-
-        Returns:
-            TranscoderServiceTransport: The transport used by the client
-                instance.
- """ - return self._transport - - @staticmethod - def job_path(project: str,location: str,job: str,) -> str: - """Returns a fully-qualified job string.""" - return "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - - @staticmethod - def parse_job_path(path: str) -> Dict[str,str]: - """Parses a job path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobs/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def job_template_path(project: str,location: str,job_template: str,) -> str: - """Returns a fully-qualified job_template string.""" - return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - - @staticmethod - def parse_job_template_path(path: str) -> Dict[str,str]: - """Parses a job_template path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)/jobTemplates/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_billing_account_path(billing_account: str, ) -> str: - """Returns a fully-qualified billing_account string.""" - return "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - - @staticmethod - def parse_common_billing_account_path(path: str) -> Dict[str,str]: - """Parse a billing_account path into its component segments.""" - m = re.match(r"^billingAccounts/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_folder_path(folder: str, ) -> str: - """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder, ) - - @staticmethod - def parse_common_folder_path(path: str) -> Dict[str,str]: - """Parse a folder path into its component segments.""" - m = re.match(r"^folders/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_organization_path(organization: str, ) -> str: - """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization, ) - - @staticmethod - def parse_common_organization_path(path: str) -> Dict[str,str]: - """Parse a organization path into its component segments.""" - m = re.match(r"^organizations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_project_path(project: str, ) -> str: - """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project, ) - - @staticmethod - def parse_common_project_path(path: str) -> Dict[str,str]: - """Parse a project path into its component segments.""" - m = re.match(r"^projects/(?P.+?)$", path) - return m.groupdict() if m else {} - - @staticmethod - def common_location_path(project: str, location: str, ) -> str: - """Returns a fully-qualified location string.""" - return "projects/{project}/locations/{location}".format(project=project, location=location, ) - - @staticmethod - def parse_common_location_path(path: str) -> Dict[str,str]: - """Parse a location path into its component segments.""" - m = re.match(r"^projects/(?P.+?)/locations/(?P.+?)$", path) - return m.groupdict() if m else {} - - @classmethod - def get_mtls_endpoint_and_cert_source(cls, client_options: Optional[client_options_lib.ClientOptions] = None): - """Return the API endpoint and client cert source for mutual TLS. 
- - The client cert source is determined in the following order: - (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the - client cert source is None. - (2) if `client_options.client_cert_source` is provided, use the provided one; if the - default client cert source exists, use the default one; otherwise the client cert - source is None. - - The API endpoint is determined in the following order: - (1) if `client_options.api_endpoint` if provided, use the provided one. - (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the - default mTLS endpoint; if the environment variable is "never", use the default API - endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise - use the default API endpoint. - - More details can be found at https://google.aip.dev/auth/4114. - - Args: - client_options (google.api_core.client_options.ClientOptions): Custom options for the - client. Only the `api_endpoint` and `client_cert_source` properties may be used - in this method. - - Returns: - Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the - client cert source to use. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If any errors happen. - """ - if client_options is None: - client_options = client_options_lib.ClientOptions() - use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false") - use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto") - if use_client_cert not in ("true", "false"): - raise ValueError("Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`") - if use_mtls_endpoint not in ("auto", "never", "always"): - raise MutualTLSChannelError("Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`") - - # Figure out the client cert source to use. - client_cert_source = None - if use_client_cert == "true": - if client_options.client_cert_source: - client_cert_source = client_options.client_cert_source - elif mtls.has_default_client_cert_source(): - client_cert_source = mtls.default_client_cert_source() - - # Figure out which api endpoint to use. - if client_options.api_endpoint is not None: - api_endpoint = client_options.api_endpoint - elif use_mtls_endpoint == "always" or (use_mtls_endpoint == "auto" and client_cert_source): - api_endpoint = cls.DEFAULT_MTLS_ENDPOINT - else: - api_endpoint = cls.DEFAULT_ENDPOINT - - return api_endpoint, client_cert_source - - def __init__(self, *, - credentials: Optional[ga_credentials.Credentials] = None, - transport: Optional[Union[str, TranscoderServiceTransport]] = None, - client_options: Optional[Union[client_options_lib.ClientOptions, dict]] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - ) -> None: - """Instantiates the transcoder service client. - - Args: - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - transport (Union[str, TranscoderServiceTransport]): The - transport to use. If set to None, a transport is chosen - automatically. - client_options (Optional[Union[google.api_core.client_options.ClientOptions, dict]]): Custom options for the - client. It won't take effect if a ``transport`` instance is provided. 
- (1) The ``api_endpoint`` property can be used to override the - default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT - environment variable can also be used to override the endpoint: - "always" (always use the default mTLS endpoint), "never" (always - use the default regular endpoint) and "auto" (auto switch to the - default mTLS endpoint if client certificate is present, this is - the default value). However, the ``api_endpoint`` property takes - precedence if provided. - (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable - is "true", then the ``client_cert_source`` property can be used - to provide client certificate for mutual TLS transport. If - not provided, the default SSL client certificate will be used if - present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not - set, no client certificate will be used. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - """ - if isinstance(client_options, dict): - client_options = client_options_lib.from_dict(client_options) - if client_options is None: - client_options = client_options_lib.ClientOptions() - client_options = cast(client_options_lib.ClientOptions, client_options) - - api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(client_options) - - api_key_value = getattr(client_options, "api_key", None) - if api_key_value and credentials: - raise ValueError("client_options.api_key and credentials are mutually exclusive") - - # Save or instantiate the transport. - # Ordinarily, we provide the transport, but allowing a custom transport - # instance provides an extensibility point for unusual situations. - if isinstance(transport, TranscoderServiceTransport): - # transport is a TranscoderServiceTransport instance. - if credentials or client_options.credentials_file or api_key_value: - raise ValueError("When providing a transport instance, " - "provide its credentials directly.") - if client_options.scopes: - raise ValueError( - "When providing a transport instance, provide its scopes " - "directly." - ) - self._transport = transport - else: - import google.auth._default # type: ignore - - if api_key_value and hasattr(google.auth._default, "get_api_key_credentials"): - credentials = google.auth._default.get_api_key_credentials(api_key_value) - - Transport = type(self).get_transport_class(transport) - self._transport = Transport( - credentials=credentials, - credentials_file=client_options.credentials_file, - host=api_endpoint, - scopes=client_options.scopes, - client_cert_source_for_mtls=client_cert_source_func, - quota_project_id=client_options.quota_project_id, - client_info=client_info, - always_use_jwt_access=True, - api_audience=client_options.api_audience, - ) - - def create_job(self, - request: Optional[Union[services.CreateJobRequest, dict]] = None, - *, - parent: Optional[str] = None, - job: Optional[resources.Job] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Creates a job in the specified region. - - .. 
code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_create_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - job = transcoder_v1.Job() - job.template_id = "template_id_value" - - request = transcoder_v1.CreateJobRequest( - parent="parent_value", - job=job, - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.CreateJobRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJob`. - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job (google.cloud.video.transcoder_v1.types.Job): - Required. Parameters for creating - transcoding job. - - This corresponds to the ``job`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.CreateJobRequest): - request = services.CreateJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job is not None: - request.job = job - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. 
- return response - - def list_jobs(self, - request: Optional[Union[services.ListJobsRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobsPager: - r"""Lists jobs in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_list_jobs(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.ListJobsRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobs`. The parent location from - which to retrieve the collection of jobs. - parent (str): - Required. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager: - Response message for TranscoderService.ListJobs. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobsRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobsRequest): - request = services.ListJobsRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_jobs] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobsPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job(self, - request: Optional[Union[services.GetJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.Job: - r"""Returns the job data. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_get_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.GetJobRequest, dict]): - The request object. Request message for - `TranscoderService.GetJob`. - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.Job: - Transcoding job resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.GetJobRequest): - request = services.GetJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job(self, - request: Optional[Union[services.DeleteJobRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_delete_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - client.delete_job(request=request) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJob`. - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobRequest): - request = services.DeleteJobRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. 
- rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def create_job_template(self, - request: Optional[Union[services.CreateJobTemplateRequest, dict]] = None, - *, - parent: Optional[str] = None, - job_template: Optional[resources.JobTemplate] = None, - job_template_id: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Creates a job template in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_create_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.CreateJobTemplateRequest( - parent="parent_value", - job_template_id="job_template_id_value", - ) - - # Make the request - response = client.create_job_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.CreateJobTemplate`. - parent (str): - Required. The parent location to create this job - template. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template (google.cloud.video.transcoder_v1.types.JobTemplate): - Required. Parameters for creating job - template. - - This corresponds to the ``job_template`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's - resource name. - - This value should be 4-63 characters, and valid - characters must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - - This corresponds to the ``job_template_id`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([parent, job_template, job_template_id]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.CreateJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. 
- if not isinstance(request, services.CreateJobTemplateRequest): - request = services.CreateJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - if job_template is not None: - request.job_template = job_template - if job_template_id is not None: - request.job_template_id = job_template_id - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.create_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def list_job_templates(self, - request: Optional[Union[services.ListJobTemplatesRequest, dict]] = None, - *, - parent: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> pagers.ListJobTemplatesPager: - r"""Lists job templates in the specified region. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_list_job_templates(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest, dict]): - The request object. Request message for - `TranscoderService.ListJobTemplates`. - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - - This corresponds to the ``parent`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager: - Response message for TranscoderService.ListJobTemplates. - - Iterating over this object will yield results and - resolve additional pages automatically. - - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([parent]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.ListJobTemplatesRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.ListJobTemplatesRequest): - request = services.ListJobTemplatesRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if parent is not None: - request.parent = parent - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.list_job_templates] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("parent", request.parent), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # This method is paged; wrap the response in a pager, which provides - # an `__iter__` convenience method. - response = pagers.ListJobTemplatesPager( - method=rpc, - request=request, - response=response, - metadata=metadata, - ) - - # Done; return the response. - return response - - def get_job_template(self, - request: Optional[Union[services.GetJobTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> resources.JobTemplate: - r"""Returns the job template data. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_get_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_template(request=request) - - # Handle the response - print(response) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.GetJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.GetJobTemplate`. - name (str): - Required. The name of the job template to retrieve. - Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - - Returns: - google.cloud.video.transcoder_v1.types.JobTemplate: - Transcoding job template resource. - """ - # Create or coerce a protobuf request object. 
- # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. - has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.GetJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.GetJobTemplateRequest): - request = services.GetJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.get_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - response = rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - # Done; return the response. - return response - - def delete_job_template(self, - request: Optional[Union[services.DeleteJobTemplateRequest, dict]] = None, - *, - name: Optional[str] = None, - retry: OptionalRetry = gapic_v1.method.DEFAULT, - timeout: Union[float, object] = gapic_v1.method.DEFAULT, - metadata: Sequence[Tuple[str, str]] = (), - ) -> None: - r"""Deletes a job template. - - .. code-block:: python - - # This snippet has been automatically generated and should be regarded as a - # code template only. - # It will require modifications to work: - # - It may require correct/in-range values for request initialization. - # - It may require specifying regional endpoints when creating the service - # client as shown in: - # https://googleapis.dev/python/google-api-core/latest/client_options.html - from google.cloud.video import transcoder_v1 - - def sample_delete_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_job_template(request=request) - - Args: - request (Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]): - The request object. Request message for - `TranscoderService.DeleteJobTemplate`. - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - - This corresponds to the ``name`` field - on the ``request`` instance; if ``request`` is provided, this - should not be set. - retry (google.api_core.retry.Retry): Designation of what errors, if any, - should be retried. - timeout (float): The timeout for this request. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - # Create or coerce a protobuf request object. - # Quick check: If we got a request object, we should *not* have - # gotten any keyword arguments that map to the request. 
- has_flattened_params = any([name]) - if request is not None and has_flattened_params: - raise ValueError('If the `request` argument is set, then none of ' - 'the individual field arguments should be set.') - - # Minor optimization to avoid making a copy if the user passes - # in a services.DeleteJobTemplateRequest. - # There's no risk of modifying the input as we've already verified - # there are no flattened fields. - if not isinstance(request, services.DeleteJobTemplateRequest): - request = services.DeleteJobTemplateRequest(request) - # If we have keyword arguments corresponding to fields on the - # request, apply these. - if name is not None: - request.name = name - - # Wrap the RPC method; this adds retry and timeout information, - # and friendly error handling. - rpc = self._transport._wrapped_methods[self._transport.delete_job_template] - - # Certain fields should be provided within the metadata header; - # add these here. - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ("name", request.name), - )), - ) - - # Send the request. - rpc( - request, - retry=retry, - timeout=timeout, - metadata=metadata, - ) - - def __enter__(self): - return self - - def __exit__(self, type, value, traceback): - """Releases underlying transport's resources. - - .. warning:: - ONLY use as a context manager if the transport is NOT shared - with other clients! Exiting the with block will CLOSE the transport - and may cause errors in other clients! - """ - self.transport.close() - - - - - - - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -__all__ = ( - "TranscoderServiceClient", -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py deleted file mode 100644 index 180638c..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py +++ /dev/null @@ -1,261 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import Any, AsyncIterator, Awaitable, Callable, Sequence, Tuple, Optional, Iterator - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services - - -class ListJobsPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and - provides an ``__iter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` - attributes are available on the pager. 
If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobsResponse], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.Job]: - for page in self.pages: - yield from page.jobs - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobsAsyncPager: - """A pager for iterating through ``list_jobs`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``jobs`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobs`` requests and continue to iterate - through the ``jobs`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobsResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobsResponse]], - request: services.ListJobsRequest, - response: services.ListJobsResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobsRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobsResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. 
- """ - self._method = method - self._request = services.ListJobsRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[services.ListJobsResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.Job]: - async def async_generator(): - async for page in self.pages: - for response in page.jobs: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and - provides an ``__iter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__iter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. - - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., services.ListJobTemplatesResponse], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiate the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - def pages(self) -> Iterator[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = self._method(self._request, metadata=self._metadata) - yield self._response - - def __iter__(self) -> Iterator[resources.JobTemplate]: - for page in self.pages: - yield from page.job_templates - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) - - -class ListJobTemplatesAsyncPager: - """A pager for iterating through ``list_job_templates`` requests. - - This class thinly wraps an initial - :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` object, and - provides an ``__aiter__`` method to iterate through its - ``job_templates`` field. - - If there are more pages, the ``__aiter__`` method will make additional - ``ListJobTemplates`` requests and continue to iterate - through the ``job_templates`` field on the - corresponding responses. 
- - All the usual :class:`google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse` - attributes are available on the pager. If multiple requests are made, only - the most recent response is retained, and thus used for attribute lookup. - """ - def __init__(self, - method: Callable[..., Awaitable[services.ListJobTemplatesResponse]], - request: services.ListJobTemplatesRequest, - response: services.ListJobTemplatesResponse, - *, - metadata: Sequence[Tuple[str, str]] = ()): - """Instantiates the pager. - - Args: - method (Callable): The method that was originally called, and - which instantiated this pager. - request (google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest): - The initial request object. - response (google.cloud.video.transcoder_v1.types.ListJobTemplatesResponse): - The initial response object. - metadata (Sequence[Tuple[str, str]]): Strings which should be - sent along with the request as metadata. - """ - self._method = method - self._request = services.ListJobTemplatesRequest(request) - self._response = response - self._metadata = metadata - - def __getattr__(self, name: str) -> Any: - return getattr(self._response, name) - - @property - async def pages(self) -> AsyncIterator[services.ListJobTemplatesResponse]: - yield self._response - while self._response.next_page_token: - self._request.page_token = self._response.next_page_token - self._response = await self._method(self._request, metadata=self._metadata) - yield self._response - def __aiter__(self) -> AsyncIterator[resources.JobTemplate]: - async def async_generator(): - async for page in self.pages: - for response in page.job_templates: - yield response - - return async_generator() - - def __repr__(self) -> str: - return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py deleted file mode 100644 index bdb6a47..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py +++ /dev/null @@ -1,33 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from collections import OrderedDict -from typing import Dict, Type - -from .base import TranscoderServiceTransport -from .grpc import TranscoderServiceGrpcTransport -from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - - -# Compile a registry of transports. 
-_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] -_transport_registry['grpc'] = TranscoderServiceGrpcTransport -_transport_registry['grpc_asyncio'] = TranscoderServiceGrpcAsyncIOTransport - -__all__ = ( - 'TranscoderServiceTransport', - 'TranscoderServiceGrpcTransport', - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py deleted file mode 100644 index 31f0fbb..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py +++ /dev/null @@ -1,248 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import abc -from typing import Awaitable, Callable, Dict, Optional, Sequence, Union - -from google.cloud.video.transcoder_v1 import gapic_version as package_version - -import google.auth # type: ignore -import google.api_core -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import retry as retries -from google.auth import credentials as ga_credentials # type: ignore -from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore - -DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(gapic_version=package_version.__version__) - - -class TranscoderServiceTransport(abc.ABC): - """Abstract transport class for TranscoderService.""" - - AUTH_SCOPES = ( - 'https://www.googleapis.com/auth/cloud-platform', - ) - - DEFAULT_HOST: str = 'transcoder.googleapis.com' - def __init__( - self, *, - host: str = DEFAULT_HOST, - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - **kwargs, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A list of scopes. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. 
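The registry above maps the keys ``grpc`` and ``grpc_asyncio`` to their transport classes. A sketch of choosing a transport explicitly, assuming the generated client resolves a string through this registry and also accepts a ready-made transport instance:

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.services.transcoder_service.transports import (
        TranscoderServiceGrpcTransport,
    )

    # By registry key.
    client = transcoder_v1.TranscoderServiceClient(transport="grpc")

    # Or with a transport instance built up front (uses application
    # default credentials when none are passed).
    transport = TranscoderServiceGrpcTransport()
    client = transcoder_v1.TranscoderServiceClient(transport=transport)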
- client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - """ - - scopes_kwargs = {"scopes": scopes, "default_scopes": self.AUTH_SCOPES} - - # Save the scopes. - self._scopes = scopes - - # If no credentials are provided, then determine the appropriate - # defaults. - if credentials and credentials_file: - raise core_exceptions.DuplicateCredentialArgs("'credentials_file' and 'credentials' are mutually exclusive") - - if credentials_file is not None: - credentials, _ = google.auth.load_credentials_from_file( - credentials_file, - **scopes_kwargs, - quota_project_id=quota_project_id - ) - elif credentials is None: - credentials, _ = google.auth.default(**scopes_kwargs, quota_project_id=quota_project_id) - # Don't apply audience if the credentials file passed from user. - if hasattr(credentials, "with_gdch_audience"): - credentials = credentials.with_gdch_audience(api_audience if api_audience else host) - - # If the credentials are service account credentials, then always try to use self signed JWT. - if always_use_jwt_access and isinstance(credentials, service_account.Credentials) and hasattr(service_account.Credentials, "with_always_use_jwt_access"): - credentials = credentials.with_always_use_jwt_access(True) - - # Save the credentials. - self._credentials = credentials - - # Save the hostname. Default to port 443 (HTTPS) if none is specified. - if ':' not in host: - host += ':443' - self._host = host - - def _prep_wrapped_messages(self, client_info): - # Precompute the wrapped methods. - self._wrapped_methods = { - self.create_job: gapic_v1.method.wrap_method( - self.create_job, - default_timeout=60.0, - client_info=client_info, - ), - self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job: gapic_v1.method.wrap_method( - self.get_job, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, - default_timeout=60.0, - client_info=client_info, - ), - self.create_job_template: gapic_v1.method.wrap_method( - self.create_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.list_job_templates: gapic_v1.method.wrap_method( - self.list_job_templates, - default_timeout=60.0, - client_info=client_info, - ), - self.get_job_template: gapic_v1.method.wrap_method( - self.get_job_template, - default_timeout=60.0, - client_info=client_info, - ), - self.delete_job_template: gapic_v1.method.wrap_method( - self.delete_job_template, - default_timeout=60.0, - client_info=client_info, - ), - } - - def close(self): - """Closes resources associated with the transport. - - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! 
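``_prep_wrapped_messages`` above wraps every RPC with a 60-second default timeout. Both the timeout and the retry policy can be overridden per call; a sketch assuming the standard ``google.api_core`` per-method options (the job name is a placeholder):

    from google.api_core import retry as retries
    from google.cloud.video import transcoder_v1

    client = transcoder_v1.TranscoderServiceClient()

    job = client.get_job(
        name="projects/my-project/locations/us-central1/jobs/my-job",
        timeout=30.0,  # overrides the 60s default applied by the wrapper
        retry=retries.Retry(initial=1.0, maximum=10.0, timeout=120.0),
    )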
- """ - raise NotImplementedError() - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Union[ - services.ListJobsResponse, - Awaitable[services.ListJobsResponse] - ]]: - raise NotImplementedError() - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Union[ - resources.Job, - Awaitable[resources.Job] - ]]: - raise NotImplementedError() - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Union[ - services.ListJobTemplatesResponse, - Awaitable[services.ListJobTemplatesResponse] - ]]: - raise NotImplementedError() - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Union[ - resources.JobTemplate, - Awaitable[resources.JobTemplate] - ]]: - raise NotImplementedError() - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Union[ - empty_pb2.Empty, - Awaitable[empty_pb2.Empty] - ]]: - raise NotImplementedError() - - @property - def kind(self) -> str: - raise NotImplementedError() - - -__all__ = ( - 'TranscoderServiceTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py deleted file mode 100644 index 28376c5..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py +++ /dev/null @@ -1,454 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 -import google.auth # type: ignore -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO - - -class TranscoderServiceGrpcTransport(TranscoderServiceTransport): - """gRPC backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. 
Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). - You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - _stubs: Dict[str, Callable] - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[grpc.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional(Sequence[str])): A list of scopes. This argument is - ignored if ``channel`` is provided. - channel (Optional[grpc.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. 
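A sketch of building the gRPC transport for mutual TLS through ``client_cert_source_for_mtls``; the mTLS hostname and the PEM file paths are assumptions, and the callback only has to return certificate and private-key bytes:

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.services.transcoder_service.transports import (
        TranscoderServiceGrpcTransport,
    )

    def client_cert_source():
        # Hypothetical callback returning (certificate_bytes, key_bytes) in PEM format.
        with open("client.pem", "rb") as cert, open("client.key", "rb") as key:
            return cert.read(), key.read()

    transport = TranscoderServiceGrpcTransport(
        host="transcoder.mtls.googleapis.com",  # assumed mTLS endpoint
        # Ignored if ``channel`` or ``ssl_channel_credentials`` is supplied.
        client_cert_source_for_mtls=client_cert_source,
    )
    client = transcoder_v1.TranscoderServiceClient(transport=transport)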
- always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. - if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> grpc.Channel: - """Create and return a gRPC channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is mutually exclusive with credentials. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. 
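``create_channel`` builds a ``grpc.Channel`` with the class-level default host and scopes; a channel created this way can be handed straight to the transport, in which case the transport's own credential arguments are ignored. A sketch (the quota project is a placeholder):

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.services.transcoder_service.transports import (
        TranscoderServiceGrpcTransport,
    )

    channel = TranscoderServiceGrpcTransport.create_channel(
        "transcoder.googleapis.com",
        quota_project_id="my-billing-project",  # placeholder
    )
    transport = TranscoderServiceGrpcTransport(channel=channel)
    client = transcoder_v1.TranscoderServiceClient(transport=transport)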
- quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - grpc.Channel: A gRPC channel object. - - Raises: - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - - return grpc_helpers.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - @property - def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. - """ - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - resources.Job]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - services.ListJobsResponse]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - ~.ListJobsResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - resources.Job]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - ~.Job]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. 
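Each stub above is keyed to a request type whose ``serialize`` hook it registers. At the client level the same RPCs can be invoked either with an explicit request object or, for the common case, with flattened arguments; a sketch with placeholder resource names:

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.types import services

    client = transcoder_v1.TranscoderServiceClient()
    parent = "projects/my-project/locations/us-central1"

    # Request-object form.
    for job in client.list_jobs(request=services.ListJobsRequest(parent=parent)):
        print(job.name, job.state)

    # Flattened form, equivalent here.
    for job in client.list_jobs(parent=parent):
        print(job.name, job.state)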
- - Returns: - Callable[[~.DeleteJobRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - services.ListJobTemplatesResponse]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - ~.ListJobTemplatesResponse]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - resources.JobTemplate]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - ~.JobTemplate]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - empty_pb2.Empty]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - ~.Empty]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - def close(self): - self.grpc_channel.close() - - @property - def kind(self) -> str: - return "grpc" - - -__all__ = ( - 'TranscoderServiceGrpcTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py deleted file mode 100644 index 4cc0b8e..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py +++ /dev/null @@ -1,453 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import warnings -from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union - -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async -from google.auth import credentials as ga_credentials # type: ignore -from google.auth.transport.grpc import SslCredentials # type: ignore - -import grpc # type: ignore -from grpc.experimental import aio # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .grpc import TranscoderServiceGrpcTransport - - -class TranscoderServiceGrpcAsyncIOTransport(TranscoderServiceTransport): - """gRPC AsyncIO backend transport for TranscoderService. - - Using the Transcoder API, you can queue asynchronous jobs for - transcoding media into various output formats. Output formats - may include different streaming standards such as HTTP Live - Streaming (HLS) and Dynamic Adaptive Streaming over HTTP (DASH). 
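The asyncio transport below backs ``TranscoderServiceAsyncClient``: every RPC is awaited and the async pagers are consumed with ``async for``. A minimal sketch, assuming the async client is exported alongside the sync one and using placeholder resource names:

    import asyncio

    from google.cloud.video import transcoder_v1

    async def main() -> None:
        client = transcoder_v1.TranscoderServiceAsyncClient()
        parent = "projects/my-project/locations/us-central1"

        pager = await client.list_job_templates(parent=parent)
        async for template in pager:  # ListJobTemplatesAsyncPager.__aiter__
            print(template.name)

    asyncio.run(main())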
- You can also customize jobs using advanced features such as - Digital Rights Management (DRM), audio equalization, content - concatenation, and digital ad-stitch ready content generation. - - This class defines the same methods as the primary client, so the - primary client can load the underlying transport implementation - and call it. - - It sends protocol buffers over the wire using gRPC (which is built on - top of HTTP/2); the ``grpcio`` package must be installed. - """ - - _grpc_channel: aio.Channel - _stubs: Dict[str, Callable] = {} - - @classmethod - def create_channel(cls, - host: str = 'transcoder.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - quota_project_id: Optional[str] = None, - **kwargs) -> aio.Channel: - """Create and return a gRPC AsyncIO channel object. - Args: - host (Optional[str]): The host for the channel to use. - credentials (Optional[~.Credentials]): The - authorization credentials to attach to requests. These - credentials identify this application to the service. If - none are specified, the client will attempt to ascertain - the credentials from the environment. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - kwargs (Optional[dict]): Keyword arguments, which are passed to the - channel creation. - Returns: - aio.Channel: A gRPC AsyncIO channel object. - """ - - return grpc_helpers_async.create_channel( - host, - credentials=credentials, - credentials_file=credentials_file, - quota_project_id=quota_project_id, - default_scopes=cls.AUTH_SCOPES, - scopes=scopes, - default_host=cls.DEFAULT_HOST, - **kwargs - ) - - def __init__(self, *, - host: str = 'transcoder.googleapis.com', - credentials: Optional[ga_credentials.Credentials] = None, - credentials_file: Optional[str] = None, - scopes: Optional[Sequence[str]] = None, - channel: Optional[aio.Channel] = None, - api_mtls_endpoint: Optional[str] = None, - client_cert_source: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - ssl_channel_credentials: Optional[grpc.ChannelCredentials] = None, - client_cert_source_for_mtls: Optional[Callable[[], Tuple[bytes, bytes]]] = None, - quota_project_id: Optional[str] = None, - client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, - always_use_jwt_access: Optional[bool] = False, - api_audience: Optional[str] = None, - ) -> None: - """Instantiate the transport. - - Args: - host (Optional[str]): - The hostname to connect to. - credentials (Optional[google.auth.credentials.Credentials]): The - authorization credentials to attach to requests. These - credentials identify the application to the service; if none - are specified, the client will attempt to ascertain the - credentials from the environment. - This argument is ignored if ``channel`` is provided. - credentials_file (Optional[str]): A file with credentials that can - be loaded with :func:`google.auth.load_credentials_from_file`. - This argument is ignored if ``channel`` is provided. - scopes (Optional[Sequence[str]]): A optional list of scopes needed for this - service. 
These are only used when credentials are not specified and - are passed to :func:`google.auth.default`. - channel (Optional[aio.Channel]): A ``Channel`` instance through - which to make calls. - api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint. - If provided, it overrides the ``host`` argument and tries to create - a mutual TLS channel with client SSL credentials from - ``client_cert_source`` or application default SSL credentials. - client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): - Deprecated. A callback to provide client SSL certificate bytes and - private key bytes, both in PEM format. It is ignored if - ``api_mtls_endpoint`` is None. - ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials - for the grpc channel. It is ignored if ``channel`` is provided. - client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]): - A callback to provide client certificate bytes and private key bytes, - both in PEM format. It is used to configure a mutual TLS channel. It is - ignored if ``channel`` or ``ssl_channel_credentials`` is provided. - quota_project_id (Optional[str]): An optional project to use for billing - and quota. - client_info (google.api_core.gapic_v1.client_info.ClientInfo): - The client info used to send a user-agent string along with - API requests. If ``None``, then default info will be used. - Generally, you only need to set this if you're developing - your own client library. - always_use_jwt_access (Optional[bool]): Whether self signed JWT should - be used for service account credentials. - - Raises: - google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport - creation failed for any reason. - google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials`` - and ``credentials_file`` are passed. - """ - self._grpc_channel = None - self._ssl_channel_credentials = ssl_channel_credentials - self._stubs: Dict[str, Callable] = {} - - if api_mtls_endpoint: - warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning) - if client_cert_source: - warnings.warn("client_cert_source is deprecated", DeprecationWarning) - - if channel: - # Ignore credentials if a channel was passed. - credentials = False - # If a channel was explicitly provided, set it. - self._grpc_channel = channel - self._ssl_channel_credentials = None - else: - if api_mtls_endpoint: - host = api_mtls_endpoint - - # Create SSL credentials with client_cert_source or application - # default SSL credentials. 
- if client_cert_source: - cert, key = client_cert_source() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - else: - self._ssl_channel_credentials = SslCredentials().ssl_credentials - - else: - if client_cert_source_for_mtls and not ssl_channel_credentials: - cert, key = client_cert_source_for_mtls() - self._ssl_channel_credentials = grpc.ssl_channel_credentials( - certificate_chain=cert, private_key=key - ) - - # The base transport sets the host, credentials and scopes - super().__init__( - host=host, - credentials=credentials, - credentials_file=credentials_file, - scopes=scopes, - quota_project_id=quota_project_id, - client_info=client_info, - always_use_jwt_access=always_use_jwt_access, - api_audience=api_audience, - ) - - if not self._grpc_channel: - self._grpc_channel = type(self).create_channel( - self._host, - # use the credentials which are saved - credentials=self._credentials, - # Set ``credentials_file`` to ``None`` here as - # the credentials that we saved earlier should be used. - credentials_file=None, - scopes=self._scopes, - ssl_credentials=self._ssl_channel_credentials, - quota_project_id=quota_project_id, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Wrap messages. This must be done after self._grpc_channel exists - self._prep_wrapped_messages(client_info) - - @property - def grpc_channel(self) -> aio.Channel: - """Create the channel designed to connect to this service. - - This property caches on the instance; repeated calls return - the same channel. - """ - # Return the channel from cache. - return self._grpc_channel - - @property - def create_job(self) -> Callable[ - [services.CreateJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the create job method over gRPC. - - Creates a job in the specified region. - - Returns: - Callable[[~.CreateJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job' not in self._stubs: - self._stubs['create_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJob', - request_serializer=services.CreateJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['create_job'] - - @property - def list_jobs(self) -> Callable[ - [services.ListJobsRequest], - Awaitable[services.ListJobsResponse]]: - r"""Return a callable for the list jobs method over gRPC. - - Lists jobs in the specified region. - - Returns: - Callable[[~.ListJobsRequest], - Awaitable[~.ListJobsResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. 
- if 'list_jobs' not in self._stubs: - self._stubs['list_jobs'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobs', - request_serializer=services.ListJobsRequest.serialize, - response_deserializer=services.ListJobsResponse.deserialize, - ) - return self._stubs['list_jobs'] - - @property - def get_job(self) -> Callable[ - [services.GetJobRequest], - Awaitable[resources.Job]]: - r"""Return a callable for the get job method over gRPC. - - Returns the job data. - - Returns: - Callable[[~.GetJobRequest], - Awaitable[~.Job]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job' not in self._stubs: - self._stubs['get_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJob', - request_serializer=services.GetJobRequest.serialize, - response_deserializer=resources.Job.deserialize, - ) - return self._stubs['get_job'] - - @property - def delete_job(self) -> Callable[ - [services.DeleteJobRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job method over gRPC. - - Deletes a job. - - Returns: - Callable[[~.DeleteJobRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job' not in self._stubs: - self._stubs['delete_job'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJob', - request_serializer=services.DeleteJobRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job'] - - @property - def create_job_template(self) -> Callable[ - [services.CreateJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the create job template method over gRPC. - - Creates a job template in the specified region. - - Returns: - Callable[[~.CreateJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'create_job_template' not in self._stubs: - self._stubs['create_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/CreateJobTemplate', - request_serializer=services.CreateJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['create_job_template'] - - @property - def list_job_templates(self) -> Callable[ - [services.ListJobTemplatesRequest], - Awaitable[services.ListJobTemplatesResponse]]: - r"""Return a callable for the list job templates method over gRPC. - - Lists job templates in the specified region. - - Returns: - Callable[[~.ListJobTemplatesRequest], - Awaitable[~.ListJobTemplatesResponse]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. 
- # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'list_job_templates' not in self._stubs: - self._stubs['list_job_templates'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/ListJobTemplates', - request_serializer=services.ListJobTemplatesRequest.serialize, - response_deserializer=services.ListJobTemplatesResponse.deserialize, - ) - return self._stubs['list_job_templates'] - - @property - def get_job_template(self) -> Callable[ - [services.GetJobTemplateRequest], - Awaitable[resources.JobTemplate]]: - r"""Return a callable for the get job template method over gRPC. - - Returns the job template data. - - Returns: - Callable[[~.GetJobTemplateRequest], - Awaitable[~.JobTemplate]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'get_job_template' not in self._stubs: - self._stubs['get_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/GetJobTemplate', - request_serializer=services.GetJobTemplateRequest.serialize, - response_deserializer=resources.JobTemplate.deserialize, - ) - return self._stubs['get_job_template'] - - @property - def delete_job_template(self) -> Callable[ - [services.DeleteJobTemplateRequest], - Awaitable[empty_pb2.Empty]]: - r"""Return a callable for the delete job template method over gRPC. - - Deletes a job template. - - Returns: - Callable[[~.DeleteJobTemplateRequest], - Awaitable[~.Empty]]: - A function that, when called, will call the underlying RPC - on the server. - """ - # Generate a "stub function" on-the-fly which will actually make - # the request. - # gRPC handles serialization and deserialization, so we just need - # to pass in the functions for each. - if 'delete_job_template' not in self._stubs: - self._stubs['delete_job_template'] = self.grpc_channel.unary_unary( - '/google.cloud.video.transcoder.v1.TranscoderService/DeleteJobTemplate', - request_serializer=services.DeleteJobTemplateRequest.serialize, - response_deserializer=empty_pb2.Empty.FromString, - ) - return self._stubs['delete_job_template'] - - def close(self): - return self.grpc_channel.close() - - -__all__ = ( - 'TranscoderServiceGrpcAsyncIOTransport', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py deleted file mode 100644 index ee09558..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/__init__.py +++ /dev/null @@ -1,78 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -from .resources import ( - AdBreak, - AudioStream, - EditAtom, - ElementaryStream, - Input, - Job, - JobConfig, - JobTemplate, - Manifest, - MuxStream, - Output, - Overlay, - PreprocessingConfig, - PubsubDestination, - SegmentSettings, - SpriteSheet, - TextStream, - VideoStream, -) -from .services import ( - CreateJobRequest, - CreateJobTemplateRequest, - DeleteJobRequest, - DeleteJobTemplateRequest, - GetJobRequest, - GetJobTemplateRequest, - ListJobsRequest, - ListJobsResponse, - ListJobTemplatesRequest, - ListJobTemplatesResponse, -) - -__all__ = ( - 'AdBreak', - 'AudioStream', - 'EditAtom', - 'ElementaryStream', - 'Input', - 'Job', - 'JobConfig', - 'JobTemplate', - 'Manifest', - 'MuxStream', - 'Output', - 'Overlay', - 'PreprocessingConfig', - 'PubsubDestination', - 'SegmentSettings', - 'SpriteSheet', - 'TextStream', - 'VideoStream', - 'CreateJobRequest', - 'CreateJobTemplateRequest', - 'DeleteJobRequest', - 'DeleteJobTemplateRequest', - 'GetJobRequest', - 'GetJobTemplateRequest', - 'ListJobsRequest', - 'ListJobsResponse', - 'ListJobTemplatesRequest', - 'ListJobTemplatesResponse', -) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py deleted file mode 100644 index 3dab7f2..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/resources.py +++ /dev/null @@ -1,2172 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1', - manifest={ - 'Job', - 'JobTemplate', - 'JobConfig', - 'Input', - 'Output', - 'EditAtom', - 'AdBreak', - 'ElementaryStream', - 'MuxStream', - 'Manifest', - 'PubsubDestination', - 'SpriteSheet', - 'Overlay', - 'PreprocessingConfig', - 'VideoStream', - 'AudioStream', - 'TextStream', - 'SegmentSettings', - }, -) - - -class Job(proto.Message): - r"""Transcoding job resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - name (str): - The resource name of the job. Format: - ``projects/{project_number}/locations/{location}/jobs/{job}`` - input_uri (str): - Input only. Specify the ``input_uri`` to populate empty - ``uri`` fields in each element of ``Job.config.inputs`` or - ``JobTemplate.config.inputs`` when using template. URI of - the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). 
See `Supported input and - output - formats `__. - output_uri (str): - Input only. Specify the ``output_uri`` to populate an empty - ``Job.config.output.uri`` or - ``JobTemplate.config.output.uri`` when using template. URI - for the output file(s). For example, - ``gs://my-bucket/outputs/``. See `Supported input and output - formats `__. - template_id (str): - Input only. Specify the ``template_id`` to use for - populating ``Job.config``. The default is ``preset/web-hd``. - - Preset Transcoder templates: - - - ``preset/{preset_id}`` - - - User defined JobTemplate: ``{job_template_id}`` - - This field is a member of `oneof`_ ``job_config``. - config (google.cloud.video.transcoder_v1.types.JobConfig): - The configuration for this job. - - This field is a member of `oneof`_ ``job_config``. - state (google.cloud.video.transcoder_v1.types.Job.ProcessingState): - Output only. The current state of the job. - create_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the job was created. - start_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - started. - end_time (google.protobuf.timestamp_pb2.Timestamp): - Output only. The time the transcoding - finished. - ttl_after_completion_days (int): - Job time to live value in days, which will be - effective after job completion. Job should be - deleted automatically after the given TTL. Enter - a value between 1 and 90. The default is 30. - labels (MutableMapping[str, str]): - The labels associated with this job. You can - use these to organize and group your jobs. - error (google.rpc.status_pb2.Status): - Output only. An error object that describes the reason for - the failure. This property is always present when ``state`` - is ``FAILED``. - """ - class ProcessingState(proto.Enum): - r"""The current state of the job.""" - PROCESSING_STATE_UNSPECIFIED = 0 - PENDING = 1 - RUNNING = 2 - SUCCEEDED = 3 - FAILED = 4 - - name: str = proto.Field( - proto.STRING, - number=1, - ) - input_uri: str = proto.Field( - proto.STRING, - number=2, - ) - output_uri: str = proto.Field( - proto.STRING, - number=3, - ) - template_id: str = proto.Field( - proto.STRING, - number=4, - oneof='job_config', - ) - config: 'JobConfig' = proto.Field( - proto.MESSAGE, - number=5, - oneof='job_config', - message='JobConfig', - ) - state: ProcessingState = proto.Field( - proto.ENUM, - number=8, - enum=ProcessingState, - ) - create_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=12, - message=timestamp_pb2.Timestamp, - ) - start_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=13, - message=timestamp_pb2.Timestamp, - ) - end_time: timestamp_pb2.Timestamp = proto.Field( - proto.MESSAGE, - number=14, - message=timestamp_pb2.Timestamp, - ) - ttl_after_completion_days: int = proto.Field( - proto.INT32, - number=15, - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=16, - ) - error: status_pb2.Status = proto.Field( - proto.MESSAGE, - number=17, - message=status_pb2.Status, - ) - - -class JobTemplate(proto.Message): - r"""Transcoding job template resource. - - Attributes: - name (str): - The resource name of the job template. Format: - ``projects/{project_number}/locations/{location}/jobTemplates/{job_template}`` - config (google.cloud.video.transcoder_v1.types.JobConfig): - The configuration for this template. - labels (MutableMapping[str, str]): - The labels associated with this job template. 
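Because ``template_id`` and ``config`` share the ``job_config`` oneof, a job is created either from a template or from an inline configuration. A sketch of the template path using the documented default preset (bucket paths and the parent are placeholders, and the flattened ``create_job`` signature is assumed):

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.types import resources

    client = transcoder_v1.TranscoderServiceClient()

    job = resources.Job(
        input_uri="gs://my-bucket/inputs/file.mp4",  # placeholder
        output_uri="gs://my-bucket/outputs/",        # placeholder
        template_id="preset/web-hd",                 # the documented default preset
    )
    response = client.create_job(
        parent="projects/my-project/locations/us-central1",
        job=job,
    )
    print(response.name, response.state)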
- You can use these to organize and group your job - templates. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - config: 'JobConfig' = proto.Field( - proto.MESSAGE, - number=2, - message='JobConfig', - ) - labels: MutableMapping[str, str] = proto.MapField( - proto.STRING, - proto.STRING, - number=3, - ) - - -class JobConfig(proto.Message): - r"""Job configuration - - Attributes: - inputs (MutableSequence[google.cloud.video.transcoder_v1.types.Input]): - List of input assets stored in Cloud Storage. - edit_list (MutableSequence[google.cloud.video.transcoder_v1.types.EditAtom]): - List of ``Edit atom``\ s. Defines the ultimate timeline of - the resulting file or manifest. - elementary_streams (MutableSequence[google.cloud.video.transcoder_v1.types.ElementaryStream]): - List of elementary streams. - mux_streams (MutableSequence[google.cloud.video.transcoder_v1.types.MuxStream]): - List of multiplexing settings for output - streams. - manifests (MutableSequence[google.cloud.video.transcoder_v1.types.Manifest]): - List of output manifests. - output (google.cloud.video.transcoder_v1.types.Output): - Output configuration. - ad_breaks (MutableSequence[google.cloud.video.transcoder_v1.types.AdBreak]): - List of ad breaks. Specifies where to insert - ad break tags in the output manifests. - pubsub_destination (google.cloud.video.transcoder_v1.types.PubsubDestination): - Destination on Pub/Sub. - sprite_sheets (MutableSequence[google.cloud.video.transcoder_v1.types.SpriteSheet]): - List of output sprite sheets. - Spritesheets require at least one VideoStream in - the Jobconfig. - overlays (MutableSequence[google.cloud.video.transcoder_v1.types.Overlay]): - List of overlays on the output video, in - descending Z-order. - """ - - inputs: MutableSequence['Input'] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message='Input', - ) - edit_list: MutableSequence['EditAtom'] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message='EditAtom', - ) - elementary_streams: MutableSequence['ElementaryStream'] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message='ElementaryStream', - ) - mux_streams: MutableSequence['MuxStream'] = proto.RepeatedField( - proto.MESSAGE, - number=4, - message='MuxStream', - ) - manifests: MutableSequence['Manifest'] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message='Manifest', - ) - output: 'Output' = proto.Field( - proto.MESSAGE, - number=6, - message='Output', - ) - ad_breaks: MutableSequence['AdBreak'] = proto.RepeatedField( - proto.MESSAGE, - number=7, - message='AdBreak', - ) - pubsub_destination: 'PubsubDestination' = proto.Field( - proto.MESSAGE, - number=8, - message='PubsubDestination', - ) - sprite_sheets: MutableSequence['SpriteSheet'] = proto.RepeatedField( - proto.MESSAGE, - number=9, - message='SpriteSheet', - ) - overlays: MutableSequence['Overlay'] = proto.RepeatedField( - proto.MESSAGE, - number=10, - message='Overlay', - ) - - -class Input(proto.Message): - r"""Input asset. - - Attributes: - key (str): - A unique key for this input. Must be - specified when using advanced mapping and edit - lists. - uri (str): - URI of the media. Input files must be at least 5 seconds in - duration and stored in Cloud Storage (for example, - ``gs://bucket/inputs/file.mp4``). If empty, the value is - populated from ``Job.input_uri``. See `Supported input and - output - formats `__. - preprocessing_config (google.cloud.video.transcoder_v1.types.PreprocessingConfig): - Preprocessing configurations. 
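For the inline ``config`` branch of the oneof, ``JobConfig`` ties ``Input``/``Output``, ``ElementaryStream``\ s and ``MuxStream``\ s together. A sketch of a minimal ad-hoc configuration; the H.264/AAC codec settings are assumptions based on the ``VideoStream``/``AudioStream`` definitions elsewhere in this module, and all URIs are placeholders:

    from google.cloud.video import transcoder_v1
    from google.cloud.video.transcoder_v1.types import resources

    config = resources.JobConfig(
        inputs=[resources.Input(key="input0", uri="gs://my-bucket/inputs/file.mp4")],
        output=resources.Output(uri="gs://my-bucket/outputs/"),
        elementary_streams=[
            resources.ElementaryStream(
                key="video-stream0",
                video_stream=resources.VideoStream(
                    # Assumed nested codec-settings message.
                    h264=resources.VideoStream.H264CodecSettings(
                        height_pixels=360,
                        width_pixels=640,
                        bitrate_bps=550_000,
                        frame_rate=60,
                    ),
                ),
            ),
            resources.ElementaryStream(
                key="audio-stream0",
                audio_stream=resources.AudioStream(codec="aac", bitrate_bps=64_000),
            ),
        ],
        mux_streams=[
            resources.MuxStream(
                key="sd",
                container="mp4",
                elementary_streams=["video-stream0", "audio-stream0"],
            ),
        ],
    )
    job = resources.Job(config=config)
    client = transcoder_v1.TranscoderServiceClient()
    client.create_job(parent="projects/my-project/locations/us-central1", job=job)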
- """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - uri: str = proto.Field( - proto.STRING, - number=2, - ) - preprocessing_config: 'PreprocessingConfig' = proto.Field( - proto.MESSAGE, - number=3, - message='PreprocessingConfig', - ) - - -class Output(proto.Message): - r"""Location of output file(s) in a Cloud Storage bucket. - - Attributes: - uri (str): - URI for the output file(s). For example, - ``gs://my-bucket/outputs/``. If empty, the value is - populated from ``Job.output_uri``. See `Supported input and - output - formats `__. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - - -class EditAtom(proto.Message): - r"""Edit atom. - - Attributes: - key (str): - A unique key for this atom. Must be specified - when using advanced mapping. - inputs (MutableSequence[str]): - List of ``Input.key``\ s identifying files that should be - used in this atom. The listed ``inputs`` must have the same - timeline. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds for the atom, relative to the input file - timeline. When ``end_time_offset`` is not specified, the - ``inputs`` are used until the end of the atom. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the atom, relative to the input - file timeline. The default is ``0s``. - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - inputs: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=2, - ) - end_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - -class AdBreak(proto.Message): - r"""Ad break. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds for the ad break, relative to the - output file timeline. The default is ``0s``. - """ - - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - -class ElementaryStream(proto.Message): - r"""Encoding of an input file such as an audio, video, or text - track. Elementary streams must be packaged before - mapping and sharing between different output formats. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - key (str): - A unique key for this elementary stream. - video_stream (google.cloud.video.transcoder_v1.types.VideoStream): - Encoding of a video stream. - - This field is a member of `oneof`_ ``elementary_stream``. - audio_stream (google.cloud.video.transcoder_v1.types.AudioStream): - Encoding of an audio stream. - - This field is a member of `oneof`_ ``elementary_stream``. - text_stream (google.cloud.video.transcoder_v1.types.TextStream): - Encoding of a text stream. For example, - closed captions or subtitles. - - This field is a member of `oneof`_ ``elementary_stream``. 
- """ - - key: str = proto.Field( - proto.STRING, - number=4, - ) - video_stream: 'VideoStream' = proto.Field( - proto.MESSAGE, - number=1, - oneof='elementary_stream', - message='VideoStream', - ) - audio_stream: 'AudioStream' = proto.Field( - proto.MESSAGE, - number=2, - oneof='elementary_stream', - message='AudioStream', - ) - text_stream: 'TextStream' = proto.Field( - proto.MESSAGE, - number=3, - oneof='elementary_stream', - message='TextStream', - ) - - -class MuxStream(proto.Message): - r"""Multiplexing settings for output stream. - - Attributes: - key (str): - A unique key for this multiplexed stream. HLS media - manifests will be named ``MuxStream.key`` with the ``.m3u8`` - extension suffix. - file_name (str): - The name of the generated file. The default is - ``MuxStream.key`` with the extension suffix corresponding to - the ``MuxStream.container``. - - Individual segments also have an incremental 10-digit - zero-padded suffix starting from 0 before the extension, - such as ``mux_stream0000000123.ts``. - container (str): - The container format. The default is ``mp4`` - - Supported container formats: - - - ``ts`` - - ``fmp4``- the corresponding file extension is ``.m4s`` - - ``mp4`` - - ``vtt`` - - See also: `Supported input and output - formats `__ - elementary_streams (MutableSequence[str]): - List of ``ElementaryStream.key``\ s multiplexed in this - stream. - segment_settings (google.cloud.video.transcoder_v1.types.SegmentSettings): - Segment settings for ``ts``, ``fmp4`` and ``vtt``. - """ - - key: str = proto.Field( - proto.STRING, - number=1, - ) - file_name: str = proto.Field( - proto.STRING, - number=2, - ) - container: str = proto.Field( - proto.STRING, - number=3, - ) - elementary_streams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - segment_settings: 'SegmentSettings' = proto.Field( - proto.MESSAGE, - number=5, - message='SegmentSettings', - ) - - -class Manifest(proto.Message): - r"""Manifest configuration. - - Attributes: - file_name (str): - The name of the generated file. The default is ``manifest`` - with the extension suffix corresponding to the - ``Manifest.type``. - type_ (google.cloud.video.transcoder_v1.types.Manifest.ManifestType): - Required. Type of the manifest, can be ``HLS`` or ``DASH``. - mux_streams (MutableSequence[str]): - Required. List of user given ``MuxStream.key``\ s that - should appear in this manifest. - - When ``Manifest.type`` is ``HLS``, a media manifest with - name ``MuxStream.key`` and ``.m3u8`` extension is generated - for each element of the ``Manifest.mux_streams``. - """ - class ManifestType(proto.Enum): - r"""The manifest type can be either ``HLS`` or ``DASH``.""" - MANIFEST_TYPE_UNSPECIFIED = 0 - HLS = 1 - DASH = 2 - - file_name: str = proto.Field( - proto.STRING, - number=1, - ) - type_: ManifestType = proto.Field( - proto.ENUM, - number=2, - enum=ManifestType, - ) - mux_streams: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class PubsubDestination(proto.Message): - r"""A Pub/Sub destination. - - Attributes: - topic (str): - The name of the Pub/Sub topic to publish job completion - notification to. For example: - ``projects/{project}/topics/{topic}``. - """ - - topic: str = proto.Field( - proto.STRING, - number=1, - ) - - -class SpriteSheet(proto.Message): - r"""Sprite sheet configuration. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. 
- Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - format_ (str): - Format type. The default is ``jpeg``. - - Supported formats: - - - ``jpeg`` - file_prefix (str): - Required. File name prefix for the generated sprite sheets. - - Each sprite sheet has an incremental 10-digit zero-padded - suffix starting from 0 before the extension, such as - ``sprite_sheet0000000123.jpeg``. - sprite_width_pixels (int): - Required. The width of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] - field or the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] - field, but not both (the API will automatically calculate - the missing field). - - For portrait videos that contain horizontal ASR and rotation - metadata, provide the width, in pixels, per the horizontal - ASR. The API calculates the height per the horizontal ASR. - The API detects any rotation metadata and swaps the - requested height and width for the output. - sprite_height_pixels (int): - Required. The height of sprite in pixels. Must be an even - integer. To preserve the source aspect ratio, set the - [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels] - field or the - [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels] - field, but not both (the API will automatically calculate - the missing field). - - For portrait videos that contain horizontal ASR and rotation - metadata, provide the height, in pixels, per the horizontal - ASR. The API calculates the width per the horizontal ASR. - The API detects any rotation metadata and swaps the - requested height and width for the output. - column_count (int): - The maximum number of sprites per row in a - sprite sheet. The default is 0, which indicates - no maximum limit. - row_count (int): - The maximum number of rows per sprite sheet. - When the sprite sheet is full, a new sprite - sheet is created. The default is 0, which - indicates no maximum limit. - start_time_offset (google.protobuf.duration_pb2.Duration): - Start time in seconds, relative to the output file timeline. - Determines the first sprite to pick. The default is ``0s``. - end_time_offset (google.protobuf.duration_pb2.Duration): - End time in seconds, relative to the output file timeline. - When ``end_time_offset`` is not specified, the sprites are - generated until the end of the output file. - total_count (int): - Total number of sprites. Create the specified - number of sprites distributed evenly across the - timeline of the output media. The default is - 100. - - This field is a member of `oneof`_ ``extraction_strategy``. - interval (google.protobuf.duration_pb2.Duration): - Starting from ``0s``, create sprites at regular intervals. - Specify the interval value in seconds. - - This field is a member of `oneof`_ ``extraction_strategy``. - quality (int): - The quality of the generated sprite sheet. - Enter a value between 1 and 100, where 1 is the - lowest quality and 100 is the highest quality. - The default is 100. A high quality value - corresponds to a low image data compression - ratio. 
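As a rough illustration of the sprite sheet options described above, the sketch below requests JPEG sprites at a fixed interval; total_count and interval share the extraction_strategy oneof, so only one of them may be set. The prefix and width are placeholders:

from google.protobuf import duration_pb2

from google.cloud.video import transcoder_v1

# One 640-pixel-wide sprite every 10 seconds of output; the height is
# left unset so the API derives it from the source aspect ratio.
sprite_sheet = transcoder_v1.types.SpriteSheet(
    format_="jpeg",
    file_prefix="sprite_sheet",
    sprite_width_pixels=640,
    interval=duration_pb2.Duration(seconds=10),
)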
- """ - - format_: str = proto.Field( - proto.STRING, - number=1, - ) - file_prefix: str = proto.Field( - proto.STRING, - number=2, - ) - sprite_width_pixels: int = proto.Field( - proto.INT32, - number=3, - ) - sprite_height_pixels: int = proto.Field( - proto.INT32, - number=4, - ) - column_count: int = proto.Field( - proto.INT32, - number=5, - ) - row_count: int = proto.Field( - proto.INT32, - number=6, - ) - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=7, - message=duration_pb2.Duration, - ) - end_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=8, - message=duration_pb2.Duration, - ) - total_count: int = proto.Field( - proto.INT32, - number=9, - oneof='extraction_strategy', - ) - interval: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=10, - oneof='extraction_strategy', - message=duration_pb2.Duration, - ) - quality: int = proto.Field( - proto.INT32, - number=11, - ) - - -class Overlay(proto.Message): - r"""Overlay configuration. - - Attributes: - image (google.cloud.video.transcoder_v1.types.Overlay.Image): - Image overlay. - animations (MutableSequence[google.cloud.video.transcoder_v1.types.Overlay.Animation]): - List of Animations. The list should be - chronological, without any time overlap. - """ - class FadeType(proto.Enum): - r"""Fade type for the overlay: ``FADE_IN`` or ``FADE_OUT``.""" - FADE_TYPE_UNSPECIFIED = 0 - FADE_IN = 1 - FADE_OUT = 2 - - class NormalizedCoordinate(proto.Message): - r"""2D normalized coordinates. Default: ``{0.0, 0.0}`` - - Attributes: - x (float): - Normalized x coordinate. - y (float): - Normalized y coordinate. - """ - - x: float = proto.Field( - proto.DOUBLE, - number=1, - ) - y: float = proto.Field( - proto.DOUBLE, - number=2, - ) - - class Image(proto.Message): - r"""Overlaid jpeg image. - - Attributes: - uri (str): - Required. URI of the JPEG image in Cloud Storage. For - example, ``gs://bucket/inputs/image.jpeg``. JPEG is the only - supported image type. - resolution (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized image resolution, based on output video - resolution. Valid values: ``0.0``–``1.0``. To respect the - original image aspect ratio, set either ``x`` or ``y`` to - ``0.0``. To use the original image resolution, set both - ``x`` and ``y`` to ``0.0``. - alpha (float): - Target image opacity. Valid values are from ``1.0`` (solid, - default) to ``0.0`` (transparent), exclusive. Set this to a - value greater than ``0.0``. - """ - - uri: str = proto.Field( - proto.STRING, - number=1, - ) - resolution: 'Overlay.NormalizedCoordinate' = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - alpha: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - class AnimationStatic(proto.Message): - r"""Display static overlay object. - - Attributes: - xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start displaying the overlay - object, in seconds. 
Default: 0 - """ - - xy: 'Overlay.NormalizedCoordinate' = proto.Field( - proto.MESSAGE, - number=1, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=2, - message=duration_pb2.Duration, - ) - - class AnimationFade(proto.Message): - r"""Display overlay object with fade animation. - - Attributes: - fade_type (google.cloud.video.transcoder_v1.types.Overlay.FadeType): - Required. Type of fade animation: ``FADE_IN`` or - ``FADE_OUT``. - xy (google.cloud.video.transcoder_v1.types.Overlay.NormalizedCoordinate): - Normalized coordinates based on output video resolution. - Valid values: ``0.0``–``1.0``. ``xy`` is the upper-left - coordinate of the overlay object. For example, use the x and - y coordinates {0,0} to position the top-left corner of the - overlay animation in the top-left corner of the output - video. - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to start the fade animation, in - seconds. Default: 0 - end_time_offset (google.protobuf.duration_pb2.Duration): - The time to end the fade animation, in seconds. Default: - ``start_time_offset`` + 1s - """ - - fade_type: 'Overlay.FadeType' = proto.Field( - proto.ENUM, - number=1, - enum='Overlay.FadeType', - ) - xy: 'Overlay.NormalizedCoordinate' = proto.Field( - proto.MESSAGE, - number=2, - message='Overlay.NormalizedCoordinate', - ) - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=3, - message=duration_pb2.Duration, - ) - end_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=4, - message=duration_pb2.Duration, - ) - - class AnimationEnd(proto.Message): - r"""End previous overlay animation from the video. Without - AnimationEnd, the overlay object will keep the state of previous - animation until the end of the video. - - Attributes: - start_time_offset (google.protobuf.duration_pb2.Duration): - The time to end overlay object, in seconds. - Default: 0 - """ - - start_time_offset: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - - class Animation(proto.Message): - r"""Animation types. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - animation_static (google.cloud.video.transcoder_v1.types.Overlay.AnimationStatic): - Display static overlay object. - - This field is a member of `oneof`_ ``animation_type``. - animation_fade (google.cloud.video.transcoder_v1.types.Overlay.AnimationFade): - Display overlay object with fade animation. - - This field is a member of `oneof`_ ``animation_type``. - animation_end (google.cloud.video.transcoder_v1.types.Overlay.AnimationEnd): - End previous animation. - - This field is a member of `oneof`_ ``animation_type``. 
- """ - - animation_static: 'Overlay.AnimationStatic' = proto.Field( - proto.MESSAGE, - number=1, - oneof='animation_type', - message='Overlay.AnimationStatic', - ) - animation_fade: 'Overlay.AnimationFade' = proto.Field( - proto.MESSAGE, - number=2, - oneof='animation_type', - message='Overlay.AnimationFade', - ) - animation_end: 'Overlay.AnimationEnd' = proto.Field( - proto.MESSAGE, - number=3, - oneof='animation_type', - message='Overlay.AnimationEnd', - ) - - image: Image = proto.Field( - proto.MESSAGE, - number=1, - message=Image, - ) - animations: MutableSequence[Animation] = proto.RepeatedField( - proto.MESSAGE, - number=2, - message=Animation, - ) - - -class PreprocessingConfig(proto.Message): - r"""Preprocessing configurations. - - Attributes: - color (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Color): - Color preprocessing configuration. - denoise (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Denoise): - Denoise preprocessing configuration. - deblock (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deblock): - Deblock preprocessing configuration. - audio (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Audio): - Audio preprocessing configuration. - crop (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Crop): - Specify the video cropping configuration. - pad (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Pad): - Specify the video pad filter configuration. - deinterlace (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace): - Specify the video deinterlace configuration. - """ - - class Color(proto.Message): - r"""Color preprocessing configuration. - - **Note:** This configuration is not supported. - - Attributes: - saturation (float): - Control color saturation of the video. Enter - a value between -1 and 1, where -1 is fully - desaturated and 1 is maximum saturation. 0 is no - change. The default is 0. - contrast (float): - Control black and white contrast of the - video. Enter a value between -1 and 1, where -1 - is minimum contrast and 1 is maximum contrast. 0 - is no change. The default is 0. - brightness (float): - Control brightness of the video. Enter a - value between -1 and 1, where -1 is minimum - brightness and 1 is maximum brightness. 0 is no - change. The default is 0. - """ - - saturation: float = proto.Field( - proto.DOUBLE, - number=1, - ) - contrast: float = proto.Field( - proto.DOUBLE, - number=2, - ) - brightness: float = proto.Field( - proto.DOUBLE, - number=3, - ) - - class Denoise(proto.Message): - r"""Denoise preprocessing configuration. - - **Note:** This configuration is not supported. - - Attributes: - strength (float): - Set strength of the denoise. Enter a value - between 0 and 1. The higher the value, the - smoother the image. 0 is no denoising. The - default is 0. - tune (str): - Set the denoiser mode. The default is ``standard``. - - Supported denoiser modes: - - - ``standard`` - - ``grain`` - """ - - strength: float = proto.Field( - proto.DOUBLE, - number=1, - ) - tune: str = proto.Field( - proto.STRING, - number=2, - ) - - class Deblock(proto.Message): - r"""Deblock preprocessing configuration. - - **Note:** This configuration is not supported. - - Attributes: - strength (float): - Set strength of the deblocker. Enter a value - between 0 and 1. The higher the value, the - stronger the block removal. 0 is no deblocking. - The default is 0. - enabled (bool): - Enable deblocker. The default is ``false``. 
- """ - - strength: float = proto.Field( - proto.DOUBLE, - number=1, - ) - enabled: bool = proto.Field( - proto.BOOL, - number=2, - ) - - class Audio(proto.Message): - r"""Audio preprocessing configuration. - - Attributes: - lufs (float): - Specify audio loudness normalization in loudness units - relative to full scale (LUFS). Enter a value between -24 and - 0 (the default), where: - - - -24 is the Advanced Television Systems Committee (ATSC - A/85) standard - - -23 is the EU R128 broadcast standard - - -19 is the prior standard for online mono audio - - -18 is the ReplayGain standard - - -16 is the prior standard for stereo audio - - -14 is the new online audio standard recommended by - Spotify, as well as Amazon Echo - - 0 disables normalization - high_boost (bool): - Enable boosting high frequency components. The default is - ``false``. - - **Note:** This field is not supported. - low_boost (bool): - Enable boosting low frequency components. The default is - ``false``. - - **Note:** This field is not supported. - """ - - lufs: float = proto.Field( - proto.DOUBLE, - number=1, - ) - high_boost: bool = proto.Field( - proto.BOOL, - number=2, - ) - low_boost: bool = proto.Field( - proto.BOOL, - number=3, - ) - - class Crop(proto.Message): - r"""Video cropping configuration for the input video. The cropped - input video is scaled to match the output resolution. - - Attributes: - top_pixels (int): - The number of pixels to crop from the top. - The default is 0. - bottom_pixels (int): - The number of pixels to crop from the bottom. - The default is 0. - left_pixels (int): - The number of pixels to crop from the left. - The default is 0. - right_pixels (int): - The number of pixels to crop from the right. - The default is 0. - """ - - top_pixels: int = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels: int = proto.Field( - proto.INT32, - number=2, - ) - left_pixels: int = proto.Field( - proto.INT32, - number=3, - ) - right_pixels: int = proto.Field( - proto.INT32, - number=4, - ) - - class Pad(proto.Message): - r"""Pad filter configuration for the input video. The padded - input video is scaled after padding with black to match the - output resolution. - - Attributes: - top_pixels (int): - The number of pixels to add to the top. The - default is 0. - bottom_pixels (int): - The number of pixels to add to the bottom. - The default is 0. - left_pixels (int): - The number of pixels to add to the left. The - default is 0. - right_pixels (int): - The number of pixels to add to the right. The - default is 0. - """ - - top_pixels: int = proto.Field( - proto.INT32, - number=1, - ) - bottom_pixels: int = proto.Field( - proto.INT32, - number=2, - ) - left_pixels: int = proto.Field( - proto.INT32, - number=3, - ) - right_pixels: int = proto.Field( - proto.INT32, - number=4, - ) - - class Deinterlace(proto.Message): - r"""Deinterlace configuration for input video. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - yadif (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace.YadifConfig): - Specifies the Yet Another Deinterlacing - Filter Configuration. - - This field is a member of `oneof`_ ``deinterlacing_filter``. 
- bwdif (google.cloud.video.transcoder_v1.types.PreprocessingConfig.Deinterlace.BwdifConfig): - Specifies the Bob Weaver Deinterlacing Filter - Configuration. - - This field is a member of `oneof`_ ``deinterlacing_filter``. - """ - - class YadifConfig(proto.Message): - r"""Yet Another Deinterlacing Filter Configuration. - - Attributes: - mode (str): - Specifies the deinterlacing mode to adopt. The default is - ``send_frame``. Supported values: - - - ``send_frame``: Output one frame for each frame - - ``send_field``: Output one frame for each field - disable_spatial_interlacing (bool): - Disable spacial interlacing. The default is ``false``. - parity (str): - The picture field parity assumed for the input interlaced - video. The default is ``auto``. Supported values: - - - ``tff``: Assume the top field is first - - ``bff``: Assume the bottom field is first - - ``auto``: Enable automatic detection of field parity - deinterlace_all_frames (bool): - Deinterlace all frames rather than just the frames - identified as interlaced. The default is ``false``. - """ - - mode: str = proto.Field( - proto.STRING, - number=1, - ) - disable_spatial_interlacing: bool = proto.Field( - proto.BOOL, - number=2, - ) - parity: str = proto.Field( - proto.STRING, - number=3, - ) - deinterlace_all_frames: bool = proto.Field( - proto.BOOL, - number=4, - ) - - class BwdifConfig(proto.Message): - r"""Bob Weaver Deinterlacing Filter Configuration. - - Attributes: - mode (str): - Specifies the deinterlacing mode to adopt. The default is - ``send_frame``. Supported values: - - - ``send_frame``: Output one frame for each frame - - ``send_field``: Output one frame for each field - parity (str): - The picture field parity assumed for the input interlaced - video. The default is ``auto``. Supported values: - - - ``tff``: Assume the top field is first - - ``bff``: Assume the bottom field is first - - ``auto``: Enable automatic detection of field parity - deinterlace_all_frames (bool): - Deinterlace all frames rather than just the frames - identified as interlaced. The default is ``false``. - """ - - mode: str = proto.Field( - proto.STRING, - number=1, - ) - parity: str = proto.Field( - proto.STRING, - number=2, - ) - deinterlace_all_frames: bool = proto.Field( - proto.BOOL, - number=3, - ) - - yadif: 'PreprocessingConfig.Deinterlace.YadifConfig' = proto.Field( - proto.MESSAGE, - number=1, - oneof='deinterlacing_filter', - message='PreprocessingConfig.Deinterlace.YadifConfig', - ) - bwdif: 'PreprocessingConfig.Deinterlace.BwdifConfig' = proto.Field( - proto.MESSAGE, - number=2, - oneof='deinterlacing_filter', - message='PreprocessingConfig.Deinterlace.BwdifConfig', - ) - - color: Color = proto.Field( - proto.MESSAGE, - number=1, - message=Color, - ) - denoise: Denoise = proto.Field( - proto.MESSAGE, - number=2, - message=Denoise, - ) - deblock: Deblock = proto.Field( - proto.MESSAGE, - number=3, - message=Deblock, - ) - audio: Audio = proto.Field( - proto.MESSAGE, - number=4, - message=Audio, - ) - crop: Crop = proto.Field( - proto.MESSAGE, - number=5, - message=Crop, - ) - pad: Pad = proto.Field( - proto.MESSAGE, - number=6, - message=Pad, - ) - deinterlace: Deinterlace = proto.Field( - proto.MESSAGE, - number=7, - message=Deinterlace, - ) - - -class VideoStream(proto.Message): - r"""Video stream resource. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. 
_oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - h264 (google.cloud.video.transcoder_v1.types.VideoStream.H264CodecSettings): - H264 codec settings. - - This field is a member of `oneof`_ ``codec_settings``. - h265 (google.cloud.video.transcoder_v1.types.VideoStream.H265CodecSettings): - H265 codec settings. - - This field is a member of `oneof`_ ``codec_settings``. - vp9 (google.cloud.video.transcoder_v1.types.VideoStream.Vp9CodecSettings): - VP9 codec settings. - - This field is a member of `oneof`_ ``codec_settings``. - """ - - class H264CodecSettings(proto.Message): - r"""H264 codec settings. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the width, in - pixels, per the horizontal ASR. The API - calculates the height per the horizontal ASR. - The API detects any rotation metadata and swaps - the requested height and width for the output. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the height, in - pixels, per the horizontal ASR. The API - calculates the width per the horizontal ASR. The - API detects any rotation metadata and swaps the - requested height and width for the output. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. The minimum value is 1,000. The maximum - value is 800,000,000. - pixel_format (str): - Pixel format to use. The default is ``yuv420p``. - - Supported pixel formats: - - - ``yuv420p`` pixel format - - ``yuv422p`` pixel format - - ``yuv444p`` pixel format - - ``yuv420p10`` 10-bit HDR pixel format - - ``yuv422p10`` 10-bit HDR pixel format - - ``yuv444p10`` 10-bit HDR pixel format - - ``yuv420p12`` 12-bit HDR pixel format - - ``yuv422p12`` 12-bit HDR pixel format - - ``yuv444p12`` 12-bit HDR pixel format - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``vbr``. - - Supported rate control modes: - - - ``vbr`` - variable bitrate - - ``crf`` - constant rate factor - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. 
- gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - - This field is a member of `oneof`_ ``gop_mode``. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``3s``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - - This field is a member of `oneof`_ ``gop_mode``. - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be ``vbr``. - The default is ``false``. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. - Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. - entropy_coder (str): - The entropy coder to use. The default is ``cabac``. - - Supported entropy coders: - - - ``cavlc`` - - ``cabac`` - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - ``baseline`` - - ``main`` - - ``high`` (default) - - The available options are - `FFmpeg-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``H264CodecSettings`` message. - tune (str): - Enforces the specified codec tune. The available options are - `FFmpeg-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``H264CodecSettings`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. The available options are - `FFmpeg-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``H264CodecSettings`` message. 
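To ground the H264CodecSettings fields above, a hedged sketch of a 1080p stream; the bitrate and GOP duration are illustrative choices rather than recommendations:

from google.protobuf import duration_pb2

from google.cloud.video import transcoder_v1

# A 1080p30 H.264 stream: VBR rate control, a 3-second GOP, and two-pass
# encoding (which, per the field docs, requires rate_control_mode "vbr").
h264 = transcoder_v1.types.VideoStream.H264CodecSettings(
    width_pixels=1920,
    height_pixels=1080,
    frame_rate=30,
    bitrate_bps=5_000_000,
    rate_control_mode="vbr",
    gop_duration=duration_pb2.Duration(seconds=3),
    enable_two_pass=True,
    profile="high",
    preset="veryfast",
)
video_stream = transcoder_v1.types.VideoStream(h264=h264)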
- """ - - width_pixels: int = proto.Field( - proto.INT32, - number=1, - ) - height_pixels: int = proto.Field( - proto.INT32, - number=2, - ) - frame_rate: float = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps: int = proto.Field( - proto.INT32, - number=4, - ) - pixel_format: str = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode: str = proto.Field( - proto.STRING, - number=6, - ) - crf_level: int = proto.Field( - proto.INT32, - number=7, - ) - allow_open_gop: bool = proto.Field( - proto.BOOL, - number=8, - ) - gop_frame_count: int = proto.Field( - proto.INT32, - number=9, - oneof='gop_mode', - ) - gop_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=10, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - enable_two_pass: bool = proto.Field( - proto.BOOL, - number=11, - ) - vbv_size_bits: int = proto.Field( - proto.INT32, - number=12, - ) - vbv_fullness_bits: int = proto.Field( - proto.INT32, - number=13, - ) - entropy_coder: str = proto.Field( - proto.STRING, - number=14, - ) - b_pyramid: bool = proto.Field( - proto.BOOL, - number=15, - ) - b_frame_count: int = proto.Field( - proto.INT32, - number=16, - ) - aq_strength: float = proto.Field( - proto.DOUBLE, - number=17, - ) - profile: str = proto.Field( - proto.STRING, - number=18, - ) - tune: str = proto.Field( - proto.STRING, - number=19, - ) - preset: str = proto.Field( - proto.STRING, - number=20, - ) - - class H265CodecSettings(proto.Message): - r"""H265 codec settings. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the width, in - pixels, per the horizontal ASR. The API - calculates the height per the horizontal ASR. - The API detects any rotation metadata and swaps - the requested height and width for the output. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the height, in - pixels, per the horizontal ASR. The API - calculates the width per the horizontal ASR. The - API detects any rotation metadata and swaps the - requested height and width for the output. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. The minimum value is 1,000. The maximum - value is 800,000,000. - pixel_format (str): - Pixel format to use. The default is ``yuv420p``. 
- - Supported pixel formats: - - - ``yuv420p`` pixel format - - ``yuv422p`` pixel format - - ``yuv444p`` pixel format - - ``yuv420p10`` 10-bit HDR pixel format - - ``yuv422p10`` 10-bit HDR pixel format - - ``yuv444p10`` 10-bit HDR pixel format - - ``yuv420p12`` 12-bit HDR pixel format - - ``yuv422p12`` 12-bit HDR pixel format - - ``yuv444p12`` 12-bit HDR pixel format - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``vbr``. - - Supported rate control modes: - - - ``vbr`` - variable bitrate - - ``crf`` - constant rate factor - crf_level (int): - Target CRF level. Must be between 10 and 36, - where 10 is the highest quality and 36 is the - most efficient compression. The default is 21. - allow_open_gop (bool): - Specifies whether an open Group of Pictures (GOP) structure - should be allowed or not. The default is ``false``. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - - This field is a member of `oneof`_ ``gop_mode``. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``3s``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - - This field is a member of `oneof`_ ``gop_mode``. - enable_two_pass (bool): - Use two-pass encoding strategy to achieve better video - quality. ``VideoStream.rate_control_mode`` must be ``vbr``. - The default is ``false``. - vbv_size_bits (int): - Size of the Video Buffering Verifier (VBV) buffer in bits. - Must be greater than zero. The default is equal to - ``VideoStream.bitrate_bps``. - vbv_fullness_bits (int): - Initial fullness of the Video Buffering Verifier (VBV) - buffer in bits. Must be greater than zero. The default is - equal to 90% of ``VideoStream.vbv_size_bits``. - b_pyramid (bool): - Allow B-pyramid for reference frame selection. This may not - be supported on all decoders. The default is ``false``. - b_frame_count (int): - The number of consecutive B-frames. Must be greater than or - equal to zero. Must be less than - ``VideoStream.gop_frame_count`` if set. The default is 0. - aq_strength (float): - Specify the intensity of the adaptive - quantizer (AQ). Must be between 0 and 1, where 0 - disables the quantizer and 1 maximizes the - quantizer. A higher value equals a lower bitrate - but smoother image. The default is 0. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - 8-bit profiles - - - ``main`` (default) - - ``main-intra`` - - ``mainstillpicture`` - - - 10-bit profiles - - - ``main10`` (default) - - ``main10-intra`` - - ``main422-10`` - - ``main422-10-intra`` - - ``main444-10`` - - ``main444-10-intra`` - - - 12-bit profiles - - - ``main12`` (default) - - ``main12-intra`` - - ``main422-12`` - - ``main422-12-intra`` - - ``main444-12`` - - ``main444-12-intra`` - - The available options are - `FFmpeg-compatible `__. Note - that certain values for this field may cause the transcoder - to override other fields you set in the - ``H265CodecSettings`` message. - tune (str): - Enforces the specified codec tune. The available options are - `FFmpeg-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``H265CodecSettings`` message. - preset (str): - Enforces the specified codec preset. The default is - ``veryfast``. 
The available options are - `FFmpeg-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``H265CodecSettings`` message. - """ - - width_pixels: int = proto.Field( - proto.INT32, - number=1, - ) - height_pixels: int = proto.Field( - proto.INT32, - number=2, - ) - frame_rate: float = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps: int = proto.Field( - proto.INT32, - number=4, - ) - pixel_format: str = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode: str = proto.Field( - proto.STRING, - number=6, - ) - crf_level: int = proto.Field( - proto.INT32, - number=7, - ) - allow_open_gop: bool = proto.Field( - proto.BOOL, - number=8, - ) - gop_frame_count: int = proto.Field( - proto.INT32, - number=9, - oneof='gop_mode', - ) - gop_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=10, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - enable_two_pass: bool = proto.Field( - proto.BOOL, - number=11, - ) - vbv_size_bits: int = proto.Field( - proto.INT32, - number=12, - ) - vbv_fullness_bits: int = proto.Field( - proto.INT32, - number=13, - ) - b_pyramid: bool = proto.Field( - proto.BOOL, - number=14, - ) - b_frame_count: int = proto.Field( - proto.INT32, - number=15, - ) - aq_strength: float = proto.Field( - proto.DOUBLE, - number=16, - ) - profile: str = proto.Field( - proto.STRING, - number=17, - ) - tune: str = proto.Field( - proto.STRING, - number=18, - ) - preset: str = proto.Field( - proto.STRING, - number=19, - ) - - class Vp9CodecSettings(proto.Message): - r"""VP9 codec settings. - - This message has `oneof`_ fields (mutually exclusive fields). - For each oneof, at most one member field can be set at the same time. - Setting any member of the oneof automatically clears all other - members. - - .. _oneof: https://proto-plus-python.readthedocs.io/en/stable/fields.html#oneofs-mutually-exclusive-fields - - Attributes: - width_pixels (int): - The width of the video in pixels. Must be an - even integer. When not specified, the width is - adjusted to match the specified height and input - aspect ratio. If both are omitted, the input - width is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the width, in - pixels, per the horizontal ASR. The API - calculates the height per the horizontal ASR. - The API detects any rotation metadata and swaps - the requested height and width for the output. - height_pixels (int): - The height of the video in pixels. Must be an - even integer. When not specified, the height is - adjusted to match the specified width and input - aspect ratio. If both are omitted, the input - height is used. - For portrait videos that contain horizontal ASR - and rotation metadata, provide the height, in - pixels, per the horizontal ASR. The API - calculates the width per the horizontal ASR. The - API detects any rotation metadata and swaps the - requested height and width for the output. - frame_rate (float): - Required. The target video frame rate in frames per second - (FPS). Must be less than or equal to 120. Will default to - the input frame rate if larger than the input frame rate. - The API will generate an output FPS that is divisible by the - input FPS, and smaller or equal to the target FPS. See - `Calculating frame - rate `__ - for more information. - bitrate_bps (int): - Required. The video bitrate in bits per - second. The minimum value is 1,000. The maximum - value is 480,000,000. 
- pixel_format (str): - Pixel format to use. The default is ``yuv420p``. - - Supported pixel formats: - - - ``yuv420p`` pixel format - - ``yuv422p`` pixel format - - ``yuv444p`` pixel format - - ``yuv420p10`` 10-bit HDR pixel format - - ``yuv422p10`` 10-bit HDR pixel format - - ``yuv444p10`` 10-bit HDR pixel format - - ``yuv420p12`` 12-bit HDR pixel format - - ``yuv422p12`` 12-bit HDR pixel format - - ``yuv444p12`` 12-bit HDR pixel format - rate_control_mode (str): - Specify the ``rate_control_mode``. The default is ``vbr``. - - Supported rate control modes: - - - ``vbr`` - variable bitrate - crf_level (int): - Target CRF level. Must be between 10 and 36, where 10 is the - highest quality and 36 is the most efficient compression. - The default is 21. - - **Note:** This field is not supported. - gop_frame_count (int): - Select the GOP size based on the specified - frame count. Must be greater than zero. - - This field is a member of `oneof`_ ``gop_mode``. - gop_duration (google.protobuf.duration_pb2.Duration): - Select the GOP size based on the specified duration. The - default is ``3s``. Note that ``gopDuration`` must be less - than or equal to ```segmentDuration`` <#SegmentSettings>`__, - and ```segmentDuration`` <#SegmentSettings>`__ must be - divisible by ``gopDuration``. - - This field is a member of `oneof`_ ``gop_mode``. - profile (str): - Enforces the specified codec profile. The following profiles - are supported: - - - ``profile0`` (default) - - ``profile1`` - - ``profile2`` - - ``profile3`` - - The available options are - `WebM-compatible `__. - Note that certain values for this field may cause the - transcoder to override other fields you set in the - ``Vp9CodecSettings`` message. - """ - - width_pixels: int = proto.Field( - proto.INT32, - number=1, - ) - height_pixels: int = proto.Field( - proto.INT32, - number=2, - ) - frame_rate: float = proto.Field( - proto.DOUBLE, - number=3, - ) - bitrate_bps: int = proto.Field( - proto.INT32, - number=4, - ) - pixel_format: str = proto.Field( - proto.STRING, - number=5, - ) - rate_control_mode: str = proto.Field( - proto.STRING, - number=6, - ) - crf_level: int = proto.Field( - proto.INT32, - number=7, - ) - gop_frame_count: int = proto.Field( - proto.INT32, - number=8, - oneof='gop_mode', - ) - gop_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=9, - oneof='gop_mode', - message=duration_pb2.Duration, - ) - profile: str = proto.Field( - proto.STRING, - number=10, - ) - - h264: H264CodecSettings = proto.Field( - proto.MESSAGE, - number=1, - oneof='codec_settings', - message=H264CodecSettings, - ) - h265: H265CodecSettings = proto.Field( - proto.MESSAGE, - number=2, - oneof='codec_settings', - message=H265CodecSettings, - ) - vp9: Vp9CodecSettings = proto.Field( - proto.MESSAGE, - number=3, - oneof='codec_settings', - message=Vp9CodecSettings, - ) - - -class AudioStream(proto.Message): - r"""Audio stream resource. - - Attributes: - codec (str): - The codec for this audio stream. The default is ``aac``. - - Supported audio codecs: - - - ``aac`` - - ``aac-he`` - - ``aac-he-v2`` - - ``mp3`` - - ``ac3`` - - ``eac3`` - bitrate_bps (int): - Required. Audio bitrate in bits per second. - Must be between 1 and 10,000,000. - channel_count (int): - Number of audio channels. Must be between 1 - and 6. The default is 2. - channel_layout (MutableSequence[str]): - A list of channel names specifying layout of the audio - channels. 
This only affects the metadata embedded in the - container headers, if supported by the specified format. The - default is ``["fl", "fr"]``. - - Supported channel names: - - - ``fl`` - Front left channel - - ``fr`` - Front right channel - - ``sl`` - Side left channel - - ``sr`` - Side right channel - - ``fc`` - Front center channel - - ``lfe`` - Low frequency - mapping_ (MutableSequence[google.cloud.video.transcoder_v1.types.AudioStream.AudioMapping]): - The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - sample_rate_hertz (int): - The audio sample rate in Hertz. The default - is 48000 Hertz. - """ - - class AudioMapping(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with audio - ``EditAtom.inputs``. - - Attributes: - atom_key (str): - Required. The ``EditAtom.key`` that references the atom with - audio inputs in the ``Job.edit_list``. - input_key (str): - Required. The ``Input.key`` that identifies the input file. - input_track (int): - Required. The zero-based index of the track - in the input file. - input_channel (int): - Required. The zero-based index of the channel - in the input audio stream. - output_channel (int): - Required. The zero-based index of the channel - in the output audio stream. - gain_db (float): - Audio volume control in dB. Negative values - decrease volume, positive values increase. The - default is 0. - """ - - atom_key: str = proto.Field( - proto.STRING, - number=1, - ) - input_key: str = proto.Field( - proto.STRING, - number=2, - ) - input_track: int = proto.Field( - proto.INT32, - number=3, - ) - input_channel: int = proto.Field( - proto.INT32, - number=4, - ) - output_channel: int = proto.Field( - proto.INT32, - number=5, - ) - gain_db: float = proto.Field( - proto.DOUBLE, - number=6, - ) - - codec: str = proto.Field( - proto.STRING, - number=1, - ) - bitrate_bps: int = proto.Field( - proto.INT32, - number=2, - ) - channel_count: int = proto.Field( - proto.INT32, - number=3, - ) - channel_layout: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=4, - ) - mapping_: MutableSequence[AudioMapping] = proto.RepeatedField( - proto.MESSAGE, - number=5, - message=AudioMapping, - ) - sample_rate_hertz: int = proto.Field( - proto.INT32, - number=6, - ) - - -class TextStream(proto.Message): - r"""Encoding of a text stream. For example, closed captions or - subtitles. - - Attributes: - codec (str): - The codec for this text stream. The default is ``webvtt``. - - Supported text codecs: - - - ``srt`` - - ``ttml`` - - ``cea608`` - - ``cea708`` - - ``webvtt`` - mapping_ (MutableSequence[google.cloud.video.transcoder_v1.types.TextStream.TextMapping]): - The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - """ - - class TextMapping(proto.Message): - r"""The mapping for the ``Job.edit_list`` atoms with text - ``EditAtom.inputs``. - - Attributes: - atom_key (str): - Required. The ``EditAtom.key`` that references atom with - text inputs in the ``Job.edit_list``. - input_key (str): - Required. The ``Input.key`` that identifies the input file. - input_track (int): - Required. The zero-based index of the track - in the input file. 
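A sketch of an AudioStream that remaps input channels through the AudioMapping message above; the atom and input keys refer to hypothetical entries in the job's edit_list and inputs:

from google.cloud.video import transcoder_v1

AudioStream = transcoder_v1.types.AudioStream

# Stereo AAC output whose left and right channels come from channels 0
# and 1 of track 0 in "input0", referenced via edit atom "atom0".
audio_stream = AudioStream(
    codec="aac",
    bitrate_bps=128_000,
    channel_count=2,
    channel_layout=["fl", "fr"],
    mapping_=[
        AudioStream.AudioMapping(
            atom_key="atom0", input_key="input0",
            input_track=0, input_channel=0, output_channel=0,
        ),
        AudioStream.AudioMapping(
            atom_key="atom0", input_key="input0",
            input_track=0, input_channel=1, output_channel=1,
        ),
    ],
)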
- """ - - atom_key: str = proto.Field( - proto.STRING, - number=1, - ) - input_key: str = proto.Field( - proto.STRING, - number=2, - ) - input_track: int = proto.Field( - proto.INT32, - number=3, - ) - - codec: str = proto.Field( - proto.STRING, - number=1, - ) - mapping_: MutableSequence[TextMapping] = proto.RepeatedField( - proto.MESSAGE, - number=3, - message=TextMapping, - ) - - -class SegmentSettings(proto.Message): - r"""Segment settings for ``ts``, ``fmp4`` and ``vtt``. - - Attributes: - segment_duration (google.protobuf.duration_pb2.Duration): - Duration of the segments in seconds. The default is - ``6.0s``. Note that ``segmentDuration`` must be greater than - or equal to ```gopDuration`` <#videostream>`__, and - ``segmentDuration`` must be divisible by - ```gopDuration`` <#videostream>`__. - individual_segments (bool): - Required. Create an individual segment file. The default is - ``false``. - """ - - segment_duration: duration_pb2.Duration = proto.Field( - proto.MESSAGE, - number=1, - message=duration_pb2.Duration, - ) - individual_segments: bool = proto.Field( - proto.BOOL, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py b/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py deleted file mode 100644 index 2ad601f..0000000 --- a/owl-bot-staging/v1/google/cloud/video/transcoder_v1/types/services.py +++ /dev/null @@ -1,325 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -from typing import MutableMapping, MutableSequence - -import proto # type: ignore - -from google.cloud.video.transcoder_v1.types import resources - - -__protobuf__ = proto.module( - package='google.cloud.video.transcoder.v1', - manifest={ - 'CreateJobRequest', - 'ListJobsRequest', - 'GetJobRequest', - 'DeleteJobRequest', - 'ListJobsResponse', - 'CreateJobTemplateRequest', - 'ListJobTemplatesRequest', - 'GetJobTemplateRequest', - 'DeleteJobTemplateRequest', - 'ListJobTemplatesResponse', - }, -) - - -class CreateJobRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJob``. - - Attributes: - parent (str): - Required. The parent location to create and process this - job. Format: ``projects/{project}/locations/{location}`` - job (google.cloud.video.transcoder_v1.types.Job): - Required. Parameters for creating transcoding - job. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job: resources.Job = proto.Field( - proto.MESSAGE, - number=2, - message=resources.Job, - ) - - -class ListJobsRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobs``. The parent - location from which to retrieve the collection of jobs. - - Attributes: - parent (str): - Required. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. 
- filter (str): - The filter expression, following the syntax - outlined in https://google.aip.dev/160. - order_by (str): - One or more fields to compare and use to sort - the output. See - https://google.aip.dev/132#ordering. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class GetJobRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJob``. - - Attributes: - name (str): - Required. The name of the job to retrieve. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJob``. - - Attributes: - name (str): - Required. The name of the job to delete. Format: - ``projects/{project}/locations/{location}/jobs/{job}`` - allow_missing (bool): - If set to true, and the job is not found, the - request will succeed but no action will be taken - on the server. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListJobsResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobs``. - - Attributes: - jobs (MutableSequence[google.cloud.video.transcoder_v1.types.Job]): - List of jobs in the specified region. - next_page_token (str): - The pagination token. - unreachable (MutableSequence[str]): - List of regions that could not be reached. - """ - - @property - def raw_page(self): - return self - - jobs: MutableSequence[resources.Job] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.Job, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -class CreateJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.CreateJobTemplate``. - - Attributes: - parent (str): - Required. The parent location to create this job template. - Format: ``projects/{project}/locations/{location}`` - job_template (google.cloud.video.transcoder_v1.types.JobTemplate): - Required. Parameters for creating job - template. - job_template_id (str): - Required. The ID to use for the job template, which will - become the final component of the job template's resource - name. - - This value should be 4-63 characters, and valid characters - must match the regular expression - ``[a-zA-Z][a-zA-Z0-9_-]*``. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - job_template: resources.JobTemplate = proto.Field( - proto.MESSAGE, - number=2, - message=resources.JobTemplate, - ) - job_template_id: str = proto.Field( - proto.STRING, - number=3, - ) - - -class ListJobTemplatesRequest(proto.Message): - r"""Request message for ``TranscoderService.ListJobTemplates``. - - Attributes: - parent (str): - Required. The parent location from which to retrieve the - collection of job templates. Format: - ``projects/{project}/locations/{location}`` - page_size (int): - The maximum number of items to return. - page_token (str): - The ``next_page_token`` value returned from a previous List - request, if any. - filter (str): - The filter expression, following the syntax - outlined in https://google.aip.dev/160. 
- order_by (str): - One or more fields to compare and use to sort - the output. See - https://google.aip.dev/132#ordering. - """ - - parent: str = proto.Field( - proto.STRING, - number=1, - ) - page_size: int = proto.Field( - proto.INT32, - number=2, - ) - page_token: str = proto.Field( - proto.STRING, - number=3, - ) - filter: str = proto.Field( - proto.STRING, - number=4, - ) - order_by: str = proto.Field( - proto.STRING, - number=5, - ) - - -class GetJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.GetJobTemplate``. - - Attributes: - name (str): - Required. The name of the job template to retrieve. Format: - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - - -class DeleteJobTemplateRequest(proto.Message): - r"""Request message for ``TranscoderService.DeleteJobTemplate``. - - Attributes: - name (str): - Required. The name of the job template to delete. - ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` - allow_missing (bool): - If set to true, and the job template is not - found, the request will succeed but no action - will be taken on the server. - """ - - name: str = proto.Field( - proto.STRING, - number=1, - ) - allow_missing: bool = proto.Field( - proto.BOOL, - number=2, - ) - - -class ListJobTemplatesResponse(proto.Message): - r"""Response message for ``TranscoderService.ListJobTemplates``. - - Attributes: - job_templates (MutableSequence[google.cloud.video.transcoder_v1.types.JobTemplate]): - List of job templates in the specified - region. - next_page_token (str): - The pagination token. - unreachable (MutableSequence[str]): - List of regions that could not be reached. - """ - - @property - def raw_page(self): - return self - - job_templates: MutableSequence[resources.JobTemplate] = proto.RepeatedField( - proto.MESSAGE, - number=1, - message=resources.JobTemplate, - ) - next_page_token: str = proto.Field( - proto.STRING, - number=2, - ) - unreachable: MutableSequence[str] = proto.RepeatedField( - proto.STRING, - number=3, - ) - - -__all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/owl-bot-staging/v1/mypy.ini b/owl-bot-staging/v1/mypy.ini deleted file mode 100644 index 574c5ae..0000000 --- a/owl-bot-staging/v1/mypy.ini +++ /dev/null @@ -1,3 +0,0 @@ -[mypy] -python_version = 3.7 -namespace_packages = True diff --git a/owl-bot-staging/v1/noxfile.py b/owl-bot-staging/v1/noxfile.py deleted file mode 100644 index 974c78e..0000000 --- a/owl-bot-staging/v1/noxfile.py +++ /dev/null @@ -1,184 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
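For the list request/response messages above, a hedged sketch of paging through job templates; the client's list_job_templates pager handles next_page_token internally, and the project and location are placeholders:

from google.cloud.video import transcoder_v1

def list_templates(project_id: str, location: str) -> None:
    """Print every job template in a location (sketch)."""
    client = transcoder_v1.TranscoderServiceClient()
    request = transcoder_v1.types.ListJobTemplatesRequest(
        parent=f"projects/{project_id}/locations/{location}",
        page_size=25,
    )
    # Iterating the returned pager transparently follows
    # ListJobTemplatesResponse.next_page_token across pages.
    for template in client.list_job_templates(request=request):
        print(template.name)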
-# -import os -import pathlib -import shutil -import subprocess -import sys - - -import nox # type: ignore - -ALL_PYTHON = [ - "3.7", - "3.8", - "3.9", - "3.10", - "3.11", -] - -CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() - -LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt" -PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8") - -BLACK_VERSION = "black==22.3.0" -BLACK_PATHS = ["docs", "google", "tests", "samples", "noxfile.py", "setup.py"] -DEFAULT_PYTHON_VERSION = "3.11" - -nox.sessions = [ - "unit", - "cover", - "mypy", - "check_lower_bounds" - # exclude update_lower_bounds from default - "docs", - "blacken", - "lint", - "lint_setup_py", -] - -@nox.session(python=ALL_PYTHON) -def unit(session): - """Run the unit test suite.""" - - session.install('coverage', 'pytest', 'pytest-cov', 'pytest-asyncio', 'asyncmock; python_version < "3.8"') - session.install('-e', '.') - - session.run( - 'py.test', - '--quiet', - '--cov=google/cloud/video/transcoder_v1/', - '--cov=tests/', - '--cov-config=.coveragerc', - '--cov-report=term', - '--cov-report=html', - os.path.join('tests', 'unit', ''.join(session.posargs)) - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def cover(session): - """Run the final coverage report. - This outputs the coverage report aggregating coverage from the unit - test runs (not system test runs), and then erases coverage data. - """ - session.install("coverage", "pytest-cov") - session.run("coverage", "report", "--show-missing", "--fail-under=100") - - session.run("coverage", "erase") - - -@nox.session(python=ALL_PYTHON) -def mypy(session): - """Run the type checker.""" - session.install( - 'mypy', - 'types-requests', - 'types-protobuf' - ) - session.install('.') - session.run( - 'mypy', - '--explicit-package-bases', - 'google', - ) - - -@nox.session -def update_lower_bounds(session): - """Update lower bounds in constraints.txt to match setup.py""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'update', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - - -@nox.session -def check_lower_bounds(session): - """Check lower bounds in setup.py are reflected in constraints file""" - session.install('google-cloud-testutils') - session.install('.') - - session.run( - 'lower-bound-checker', - 'check', - '--package-name', - PACKAGE_NAME, - '--constraints-file', - str(LOWER_BOUND_CONSTRAINTS_FILE), - ) - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def docs(session): - """Build the docs for this library.""" - - session.install("-e", ".") - session.install("sphinx==4.0.1", "alabaster", "recommonmark") - - shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True) - session.run( - "sphinx-build", - "-W", # warnings as errors - "-T", # show full traceback on exception - "-N", # no colors - "-b", - "html", - "-d", - os.path.join("docs", "_build", "doctrees", ""), - os.path.join("docs", ""), - os.path.join("docs", "_build", "html", ""), - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint(session): - """Run linters. - - Returns a failure if the linters find linting errors or sufficiently - serious code quality issues. - """ - session.install("flake8", BLACK_VERSION) - session.run( - "black", - "--check", - *BLACK_PATHS, - ) - session.run("flake8", "google", "tests", "samples") - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def blacken(session): - """Run black. 
Format code to uniform standard.""" - session.install(BLACK_VERSION) - session.run( - "black", - *BLACK_PATHS, - ) - - -@nox.session(python=DEFAULT_PYTHON_VERSION) -def lint_setup_py(session): - """Verify that setup.py is valid (including RST check).""" - session.install("docutils", "pygments") - session.run("python", "setup.py", "check", "--restructuredtext", "--strict") diff --git a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json b/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json deleted file mode 100644 index 2c7572e..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json +++ /dev/null @@ -1,1315 +0,0 @@ -{ - "clientLibrary": { - "apis": [ - { - "id": "google.cloud.video.transcoder.v1", - "version": "v1" - } - ], - "language": "PYTHON", - "name": "google-cloud-video-transcoder", - "version": "0.1.0" - }, - "snippets": [ - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "CreateJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_template", - "type": "google.cloud.video.transcoder_v1.types.JobTemplate" - }, - { - "name": "job_template_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", - "shortName": "create_job_template" - }, - "description": "Sample for CreateJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_create_job_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_create_job_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "CreateJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": 
"google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job_template", - "type": "google.cloud.video.transcoder_v1.types.JobTemplate" - }, - { - "name": "job_template_id", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", - "shortName": "create_job_template" - }, - "description": "Sample for CreateJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 46, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 49, - "start": 47, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.video.transcoder_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "transcoder_v1_generated_transcoder_service_create_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_async", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_create_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", - "service": { - "fullName": 
"google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "CreateJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "job", - "type": "google.cloud.video.transcoder_v1.types.Job" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.Job", - "shortName": "create_job" - }, - "description": "Sample for CreateJob", - "file": "transcoder_v1_generated_transcoder_service_create_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_sync", - "segments": [ - { - "end": 55, - "start": 27, - "type": "FULL" - }, - { - "end": 55, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 49, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 52, - "start": 50, - "type": "REQUEST_EXECUTION" - }, - { - "end": 56, - "start": 53, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_create_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "DeleteJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_template" - }, - "description": "Sample for DeleteJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - 
"shortName": "TranscoderService" - }, - "shortName": "DeleteJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job_template" - }, - "description": "Sample for DeleteJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "transcoder_v1_generated_transcoder_service_delete_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_async", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_delete_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "DeleteJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": 
"google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "shortName": "delete_job" - }, - "description": "Sample for DeleteJob", - "file": "transcoder_v1_generated_transcoder_service_delete_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_sync", - "segments": [ - { - "end": 49, - "start": 27, - "type": "FULL" - }, - { - "end": 49, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 50, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_delete_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "GetJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", - "shortName": "get_job_template" - }, - "description": "Sample for GetJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_get_job_template_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_get_job_template_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job_template", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "GetJobTemplate" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - 
"resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", - "shortName": "get_job_template" - }, - "description": "Sample for GetJobTemplate", - "file": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": "transcoder_v1_generated_transcoder_service_get_job_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_async", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_get_job_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "GetJob" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" - }, - { - "name": "name", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.types.Job", - "shortName": "get_job" - }, - "description": "Sample for GetJob", - "file": 
"transcoder_v1_generated_transcoder_service_get_job_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_sync", - "segments": [ - { - "end": 51, - "start": 27, - "type": "FULL" - }, - { - "end": 51, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 52, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_get_job_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_job_templates", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "ListJobTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager", - "shortName": "list_job_templates" - }, - "description": "Sample for ListJobTemplates", - "file": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_job_templates", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "ListJobTemplates" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager", - "shortName": "list_job_templates" - }, - 
"description": "Sample for ListJobTemplates", - "file": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py" - }, - { - "canonical": true, - "clientMethod": { - "async": true, - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", - "shortName": "TranscoderServiceAsyncClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_jobs", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": "transcoder_v1_generated_transcoder_service_list_jobs_async.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_async", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_list_jobs_async.py" - }, - { - "canonical": true, - "clientMethod": { - "client": { - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", - "shortName": "TranscoderServiceClient" - }, - "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_jobs", - "method": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", - "service": { - "fullName": "google.cloud.video.transcoder.v1.TranscoderService", - "shortName": "TranscoderService" - }, - "shortName": "ListJobs" - }, - "parameters": [ - { - "name": "request", - "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" - }, - { - "name": "parent", - "type": "str" - }, - { - "name": "retry", - "type": "google.api_core.retry.Retry" - }, - { - "name": "timeout", - "type": "float" - }, - { - "name": "metadata", - "type": "Sequence[Tuple[str, str]" - } - ], - "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager", - "shortName": "list_jobs" - }, - "description": "Sample for ListJobs", - "file": 
"transcoder_v1_generated_transcoder_service_list_jobs_sync.py", - "language": "PYTHON", - "origin": "API_DEFINITION", - "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_sync", - "segments": [ - { - "end": 52, - "start": 27, - "type": "FULL" - }, - { - "end": 52, - "start": 27, - "type": "SHORT" - }, - { - "end": 40, - "start": 38, - "type": "CLIENT_INITIALIZATION" - }, - { - "end": 45, - "start": 41, - "type": "REQUEST_INITIALIZATION" - }, - { - "end": 48, - "start": 46, - "type": "REQUEST_EXECUTION" - }, - { - "end": 53, - "start": 49, - "type": "RESPONSE_HANDLING" - } - ], - "title": "transcoder_v1_generated_transcoder_service_list_jobs_sync.py" - } - ] -} diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py deleted file mode 100644 index 521cfc2..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_async.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_CreateJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_create_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - job = transcoder_v1.Job() - job.template_id = "template_id_value" - - request = transcoder_v1.CreateJobRequest( - parent="parent_value", - job=job, - ) - - # Make the request - response = await client.create_job(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_CreateJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py deleted file mode 100644 index 739f5e3..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_sync.py +++ /dev/null @@ -1,56 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_CreateJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_create_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - job = transcoder_v1.Job() - job.template_id = "template_id_value" - - request = transcoder_v1.CreateJobRequest( - parent="parent_value", - job=job, - ) - - # Make the request - response = client.create_job(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_CreateJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py deleted file mode 100644 index 609d3ae..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_CreateJobTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_create_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.CreateJobTemplateRequest( - parent="parent_value", - job_template_id="job_template_id_value", - ) - - # Make the request - response = await client.create_job_template(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_CreateJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py deleted file mode 100644 index 8c47c3c..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_create_job_template_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for CreateJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_create_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.CreateJobTemplateRequest( - parent="parent_value", - job_template_id="job_template_id_value", - ) - - # Make the request - response = client.create_job_template(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py deleted file mode 100644 index 60c9709..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_DeleteJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_delete_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - await client.delete_job(request=request) - - -# [END transcoder_v1_generated_TranscoderService_DeleteJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py deleted file mode 100644 index 5735b2e..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_DeleteJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_delete_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobRequest( - name="name_value", - ) - - # Make the request - client.delete_job(request=request) - - -# [END transcoder_v1_generated_TranscoderService_DeleteJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py deleted file mode 100644 index 1168d4b..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_async.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_delete_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobTemplateRequest( - name="name_value", - ) - - # Make the request - await client.delete_job_template(request=request) - - -# [END transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py deleted file mode 100644 index 2a9924e..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_delete_job_template_sync.py +++ /dev/null @@ -1,50 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for DeleteJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_delete_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.DeleteJobTemplateRequest( - name="name_value", - ) - - # Make the request - client.delete_job_template(request=request) - - -# [END transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py deleted file mode 100644 index 41e2bb5..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_GetJob_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_get_job(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_GetJob_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py deleted file mode 100644 index dbfed87..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJob -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_GetJob_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_get_job(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobRequest( - name="name_value", - ) - - # Make the request - response = client.get_job(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_GetJob_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py deleted file mode 100644 index 4360a61..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_async.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_GetJobTemplate_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_get_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobTemplateRequest( - name="name_value", - ) - - # Make the request - response = await client.get_job_template(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_GetJobTemplate_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py deleted file mode 100644 index 242fd23..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_get_job_template_sync.py +++ /dev/null @@ -1,52 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for GetJobTemplate -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_GetJobTemplate_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_get_job_template(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.GetJobTemplateRequest( - name="name_value", - ) - - # Make the request - response = client.get_job_template(request=request) - - # Handle the response - print(response) - -# [END transcoder_v1_generated_TranscoderService_GetJobTemplate_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py deleted file mode 100644 index f1621bb..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_ListJobTemplates_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_list_job_templates(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_templates(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END transcoder_v1_generated_TranscoderService_ListJobTemplates_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py deleted file mode 100644 index 6a1b7d5..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_job_templates_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobTemplates -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_ListJobTemplates_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_list_job_templates(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobTemplatesRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_job_templates(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END transcoder_v1_generated_TranscoderService_ListJobTemplates_sync] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py deleted file mode 100644 index a559323..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_async.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_ListJobs_async] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. 
-# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -async def sample_list_jobs(): - # Create a client - client = transcoder_v1.TranscoderServiceAsyncClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - async for response in page_result: - print(response) - -# [END transcoder_v1_generated_TranscoderService_ListJobs_async] diff --git a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py b/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py deleted file mode 100644 index badb770..0000000 --- a/owl-bot-staging/v1/samples/generated_samples/transcoder_v1_generated_transcoder_service_list_jobs_sync.py +++ /dev/null @@ -1,53 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -# Generated code. DO NOT EDIT! -# -# Snippet for ListJobs -# NOTE: This snippet has been automatically generated for illustrative purposes only. -# It may require modifications to work in your environment. - -# To install the latest published package dependency, execute the following: -# python3 -m pip install google-cloud-video-transcoder - - -# [START transcoder_v1_generated_TranscoderService_ListJobs_sync] -# This snippet has been automatically generated and should be regarded as a -# code template only. -# It will require modifications to work: -# - It may require correct/in-range values for request initialization. -# - It may require specifying regional endpoints when creating the service -# client as shown in: -# https://googleapis.dev/python/google-api-core/latest/client_options.html -from google.cloud.video import transcoder_v1 - - -def sample_list_jobs(): - # Create a client - client = transcoder_v1.TranscoderServiceClient() - - # Initialize request argument(s) - request = transcoder_v1.ListJobsRequest( - parent="parent_value", - ) - - # Make the request - page_result = client.list_jobs(request=request) - - # Handle the response - for response in page_result: - print(response) - -# [END transcoder_v1_generated_TranscoderService_ListJobs_sync] diff --git a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py b/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py deleted file mode 100644 index 667e582..0000000 --- a/owl-bot-staging/v1/scripts/fixup_transcoder_v1_keywords.py +++ /dev/null @@ -1,183 +0,0 @@ -#! /usr/bin/env python3 -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. 
-# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# -import argparse -import os -import libcst as cst -import pathlib -import sys -from typing import (Any, Callable, Dict, List, Sequence, Tuple) - - -def partition( - predicate: Callable[[Any], bool], - iterator: Sequence[Any] -) -> Tuple[List[Any], List[Any]]: - """A stable, out-of-place partition.""" - results = ([], []) - - for i in iterator: - results[int(predicate(i))].append(i) - - # Returns trueList, falseList - return results[1], results[0] - - -class transcoderCallTransformer(cst.CSTTransformer): - CTRL_PARAMS: Tuple[str] = ('retry', 'timeout', 'metadata') - METHOD_TO_PARAMS: Dict[str, Tuple[str]] = { - 'create_job': ('parent', 'job', ), - 'create_job_template': ('parent', 'job_template', 'job_template_id', ), - 'delete_job': ('name', 'allow_missing', ), - 'delete_job_template': ('name', 'allow_missing', ), - 'get_job': ('name', ), - 'get_job_template': ('name', ), - 'list_jobs': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - 'list_job_templates': ('parent', 'page_size', 'page_token', 'filter', 'order_by', ), - } - - def leave_Call(self, original: cst.Call, updated: cst.Call) -> cst.CSTNode: - try: - key = original.func.attr.value - kword_params = self.METHOD_TO_PARAMS[key] - except (AttributeError, KeyError): - # Either not a method from the API or too convoluted to be sure. - return updated - - # If the existing code is valid, keyword args come after positional args. - # Therefore, all positional args must map to the first parameters. - args, kwargs = partition(lambda a: not bool(a.keyword), updated.args) - if any(k.keyword.value == "request" for k in kwargs): - # We've already fixed this file, don't fix it again. - return updated - - kwargs, ctrl_kwargs = partition( - lambda a: a.keyword.value not in self.CTRL_PARAMS, - kwargs - ) - - args, ctrl_args = args[:len(kword_params)], args[len(kword_params):] - ctrl_kwargs.extend(cst.Arg(value=a.value, keyword=cst.Name(value=ctrl)) - for a, ctrl in zip(ctrl_args, self.CTRL_PARAMS)) - - request_arg = cst.Arg( - value=cst.Dict([ - cst.DictElement( - cst.SimpleString("'{}'".format(name)), -cst.Element(value=arg.value) - ) - # Note: the args + kwargs looks silly, but keep in mind that - # the control parameters had to be stripped out, and that - # those could have been passed positionally or by keyword. - for name, arg in zip(kword_params, args + kwargs)]), - keyword=cst.Name("request") - ) - - return updated.with_changes( - args=[request_arg] + ctrl_kwargs - ) - - -def fix_files( - in_dir: pathlib.Path, - out_dir: pathlib.Path, - *, - transformer=transcoderCallTransformer(), -): - """Duplicate the input dir to the output dir, fixing file method calls. - - Preconditions: - * in_dir is a real directory - * out_dir is a real, empty directory - """ - pyfile_gen = ( - pathlib.Path(os.path.join(root, f)) - for root, _, files in os.walk(in_dir) - for f in files if os.path.splitext(f)[1] == ".py" - ) - - for fpath in pyfile_gen: - with open(fpath, 'r') as f: - src = f.read() - - # Parse the code and insert method call fixes. 
- tree = cst.parse_module(src) - updated = tree.visit(transformer) - - # Create the path and directory structure for the new file. - updated_path = out_dir.joinpath(fpath.relative_to(in_dir)) - updated_path.parent.mkdir(parents=True, exist_ok=True) - - # Generate the updated source file at the corresponding path. - with open(updated_path, 'w') as f: - f.write(updated.code) - - -if __name__ == '__main__': - parser = argparse.ArgumentParser( - description="""Fix up source that uses the transcoder client library. - -The existing sources are NOT overwritten but are copied to output_dir with changes made. - -Note: This tool operates at a best-effort level at converting positional - parameters in client method calls to keyword based parameters. - Cases where it WILL FAIL include - A) * or ** expansion in a method call. - B) Calls via function or method alias (includes free function calls) - C) Indirect or dispatched calls (e.g. the method is looked up dynamically) - - These all constitute false negatives. The tool will also detect false - positives when an API method shares a name with another method. -""") - parser.add_argument( - '-d', - '--input-directory', - required=True, - dest='input_dir', - help='the input directory to walk for python files to fix up', - ) - parser.add_argument( - '-o', - '--output-directory', - required=True, - dest='output_dir', - help='the directory to output files fixed via un-flattening', - ) - args = parser.parse_args() - input_dir = pathlib.Path(args.input_dir) - output_dir = pathlib.Path(args.output_dir) - if not input_dir.is_dir(): - print( - f"input directory '{input_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if not output_dir.is_dir(): - print( - f"output directory '{output_dir}' does not exist or is not a directory", - file=sys.stderr, - ) - sys.exit(-1) - - if os.listdir(output_dir): - print( - f"output directory '{output_dir}' is not empty", - file=sys.stderr, - ) - sys.exit(-1) - - fix_files(input_dir, output_dir) diff --git a/owl-bot-staging/v1/setup.py b/owl-bot-staging/v1/setup.py deleted file mode 100644 index b2a3a81..0000000 --- a/owl-bot-staging/v1/setup.py +++ /dev/null @@ -1,92 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import io -import os - -import setuptools # type: ignore - -package_root = os.path.abspath(os.path.dirname(__file__)) - -name = 'google-cloud-video-transcoder' - - -description = "Google Cloud Video Transcoder API client library" - -version = {} -with open(os.path.join(package_root, 'google/cloud/video/transcoder/gapic_version.py')) as fp: - exec(fp.read(), version) -version = version["__version__"] - -if version[0] == "0": - release_status = "Development Status :: 4 - Beta" -else: - release_status = "Development Status :: 5 - Production/Stable" - -dependencies = [ - "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*", - "proto-plus >= 1.22.0, <2.0.0dev", - "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'", - "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5", -] -url = "https://github.com/googleapis/python-video-transcoder" - -package_root = os.path.abspath(os.path.dirname(__file__)) - -readme_filename = os.path.join(package_root, "README.rst") -with io.open(readme_filename, encoding="utf-8") as readme_file: - readme = readme_file.read() - -packages = [ - package - for package in setuptools.PEP420PackageFinder.find() - if package.startswith("google") -] - -namespaces = ["google"] -if "google.cloud" in packages: - namespaces.append("google.cloud") - -setuptools.setup( - name=name, - version=version, - description=description, - long_description=readme, - author="Google LLC", - author_email="googleapis-packages@google.com", - license="Apache 2.0", - url=url, - classifiers=[ - release_status, - "Intended Audience :: Developers", - "License :: OSI Approved :: Apache Software License", - "Programming Language :: Python", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.7", - "Programming Language :: Python :: 3.8", - "Programming Language :: Python :: 3.9", - "Programming Language :: Python :: 3.10", - "Programming Language :: Python :: 3.11", - "Operating System :: OS Independent", - "Topic :: Internet", - ], - platforms="Posix; MacOS X; Windows", - packages=packages, - python_requires=">=3.7", - namespace_packages=namespaces, - install_requires=dependencies, - include_package_data=True, - zip_safe=False, -) diff --git a/owl-bot-staging/v1/testing/constraints-3.10.txt b/owl-bot-staging/v1/testing/constraints-3.10.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.10.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.11.txt b/owl-bot-staging/v1/testing/constraints-3.11.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.11.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.7.txt b/owl-bot-staging/v1/testing/constraints-3.7.txt deleted file mode 100644 index 6c44adf..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.7.txt +++ /dev/null @@ -1,9 +0,0 @@ -# This constraints file is used to check that lower bounds -# are correct in setup.py -# List all library dependencies and extras in this file. 
-# Pin the version to the lower bound. -# e.g., if setup.py has "google-cloud-foo >= 1.14.0, < 2.0.0dev", -# Then this file should have google-cloud-foo==1.14.0 -google-api-core==1.34.0 -proto-plus==1.22.0 -protobuf==3.19.5 diff --git a/owl-bot-staging/v1/testing/constraints-3.8.txt b/owl-bot-staging/v1/testing/constraints-3.8.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.8.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/testing/constraints-3.9.txt b/owl-bot-staging/v1/testing/constraints-3.9.txt deleted file mode 100644 index ed7f9ae..0000000 --- a/owl-bot-staging/v1/testing/constraints-3.9.txt +++ /dev/null @@ -1,6 +0,0 @@ -# -*- coding: utf-8 -*- -# This constraints file is required for unit tests. -# List all library dependencies and extras in this file. -google-api-core -proto-plus -protobuf diff --git a/owl-bot-staging/v1/tests/__init__.py b/owl-bot-staging/v1/tests/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/__init__.py b/owl-bot-staging/v1/tests/unit/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
-# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py deleted file mode 100644 index 231bc12..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ - -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# diff --git a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py deleted file mode 100644 index e714ff7..0000000 --- a/owl-bot-staging/v1/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ /dev/null @@ -1,3484 +0,0 @@ -# -*- coding: utf-8 -*- -# Copyright 2022 Google LLC -# -# Licensed under the Apache License, Version 2.0 (the "License"); -# you may not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. 
-# -import os -# try/except added for compatibility with python < 3.8 -try: - from unittest import mock - from unittest.mock import AsyncMock # pragma: NO COVER -except ImportError: # pragma: NO COVER - import mock - -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule -from proto.marshal.rules import wrappers - -from google.api_core import client_options -from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template -from google.auth import credentials as ga_credentials -from google.auth.exceptions import MutualTLSChannelError -from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceAsyncClient -from google.cloud.video.transcoder_v1.services.transcoder_service import TranscoderServiceClient -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.services.transcoder_service import transports -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.oauth2 import service_account -from google.protobuf import any_pb2 # type: ignore -from google.protobuf import duration_pb2 # type: ignore -from google.protobuf import timestamp_pb2 # type: ignore -from google.rpc import status_pb2 # type: ignore -import google.auth - - -def client_cert_source_callback(): - return b"cert bytes", b"key bytes" - - -# If default endpoint is localhost, then default mtls endpoint will be the same. -# This method modifies the default endpoint so the client can produce a different -# mtls endpoint for endpoint testing purposes. 
-def modify_default_endpoint(client): - return "foo.googleapis.com" if ("localhost" in client.DEFAULT_ENDPOINT) else client.DEFAULT_ENDPOINT - - -def test__get_default_mtls_endpoint(): - api_endpoint = "example.googleapis.com" - api_mtls_endpoint = "example.mtls.googleapis.com" - sandbox_endpoint = "example.sandbox.googleapis.com" - sandbox_mtls_endpoint = "example.mtls.sandbox.googleapis.com" - non_googleapi = "api.example.com" - - assert TranscoderServiceClient._get_default_mtls_endpoint(None) is None - assert TranscoderServiceClient._get_default_mtls_endpoint(api_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(api_mtls_endpoint) == api_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(sandbox_mtls_endpoint) == sandbox_mtls_endpoint - assert TranscoderServiceClient._get_default_mtls_endpoint(non_googleapi) == non_googleapi - - -@pytest.mark.parametrize("client_class,transport_name", [ - (TranscoderServiceClient, "grpc"), - (TranscoderServiceAsyncClient, "grpc_asyncio"), -]) -def test_transcoder_service_client_from_service_account_info(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory: - factory.return_value = creds - info = {"valid": True} - client = client_class.from_service_account_info(info, transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'transcoder.googleapis.com:443' - ) - - -@pytest.mark.parametrize("transport_class,transport_name", [ - (transports.TranscoderServiceGrpcTransport, "grpc"), - (transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_service_account_always_use_jwt(transport_class, transport_name): - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=True) - use_jwt.assert_called_once_with(True) - - with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt: - creds = service_account.Credentials(None, None, None) - transport = transport_class(credentials=creds, always_use_jwt_access=False) - use_jwt.assert_not_called() - - -@pytest.mark.parametrize("client_class,transport_name", [ - (TranscoderServiceClient, "grpc"), - (TranscoderServiceAsyncClient, "grpc_asyncio"), -]) -def test_transcoder_service_client_from_service_account_file(client_class, transport_name): - creds = ga_credentials.AnonymousCredentials() - with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory: - factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - client = client_class.from_service_account_json("dummy/file/path.json", transport=transport_name) - assert client.transport._credentials == creds - assert isinstance(client, client_class) - - assert client.transport._host == ( - 'transcoder.googleapis.com:443' - ) - - -def test_transcoder_service_client_get_transport_class(): - transport = 
TranscoderServiceClient.get_transport_class() - available_transports = [ - transports.TranscoderServiceGrpcTransport, - ] - assert transport in available_transports - - transport = TranscoderServiceClient.get_transport_class("grpc") - assert transport == transports.TranscoderServiceGrpcTransport - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -def test_transcoder_service_client_client_options(client_class, transport_class, transport_name): - # Check that if channel is provided we won't create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials() - ) - client = client_class(transport=transport) - gtc.assert_not_called() - - # Check that if channel is provided via str we will create a new one. - with mock.patch.object(TranscoderServiceClient, 'get_transport_class') as gtc: - client = client_class(transport=transport_name) - gtc.assert_called() - - # Check the case api_endpoint is provided. - options = client_options.ClientOptions(api_endpoint="squid.clam.whelk") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name, client_options=options) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is - # "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_MTLS_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has - # unsupported value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}): - with pytest.raises(MutualTLSChannelError): - client = client_class(transport=transport_name) - - # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}): - with pytest.raises(ValueError): - client = client_class(transport=transport_name) - - # Check the case quota_project_id is provided - options = client_options.ClientOptions(quota_project_id="octopus") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id="octopus", - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - # Check the case api_endpoint is provided - options = client_options.ClientOptions(api_audience="https://language.googleapis.com") - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience="https://language.googleapis.com" - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "true"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "true"), - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", "false"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", "false"), -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"}) -def test_transcoder_service_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env): - # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default - # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists. - - # Check the case client_cert_source is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - options = client_options.ClientOptions(client_cert_source=client_cert_source_callback) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - - if use_client_cert_env == "false": - expected_client_cert_source = None - expected_host = client.DEFAULT_ENDPOINT - else: - expected_client_cert_source = client_cert_source_callback - expected_host = client.DEFAULT_MTLS_ENDPOINT - - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case ADC client cert is provided. Whether client cert is used depends on - # GOOGLE_API_USE_CLIENT_CERTIFICATE value. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback): - if use_client_cert_env == "false": - expected_host = client.DEFAULT_ENDPOINT - expected_client_cert_source = None - else: - expected_host = client.DEFAULT_MTLS_ENDPOINT - expected_client_cert_source = client_cert_source_callback - - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=expected_host, - scopes=None, - client_cert_source_for_mtls=expected_client_cert_source, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # Check the case client_cert_source and ADC client cert are not provided. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}): - with mock.patch.object(transport_class, '__init__') as patched: - with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False): - patched.return_value = None - client = client_class(transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class", [ - TranscoderServiceClient, TranscoderServiceAsyncClient -]) -@mock.patch.object(TranscoderServiceClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceClient)) -@mock.patch.object(TranscoderServiceAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(TranscoderServiceAsyncClient)) -def test_transcoder_service_client_get_mtls_endpoint_and_cert_source(client_class): - mock_client_cert_source = mock.Mock() - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "true". 
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source == mock_client_cert_source - - # Test the case GOOGLE_API_USE_CLIENT_CERTIFICATE is "false". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "false"}): - mock_client_cert_source = mock.Mock() - mock_api_endpoint = "foo" - options = client_options.ClientOptions(client_cert_source=mock_client_cert_source, api_endpoint=mock_api_endpoint) - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source(options) - assert api_endpoint == mock_api_endpoint - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "never". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "always". - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert doesn't exist. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=False): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_ENDPOINT - assert cert_source is None - - # Test the case GOOGLE_API_USE_MTLS_ENDPOINT is "auto" and default cert exists. - with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "true"}): - with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True): - with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=mock_client_cert_source): - api_endpoint, cert_source = client_class.get_mtls_endpoint_and_cert_source() - assert api_endpoint == client_class.DEFAULT_MTLS_ENDPOINT - assert cert_source == mock_client_cert_source - - -@pytest.mark.parametrize("client_class,transport_class,transport_name", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc"), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio"), -]) -def test_transcoder_service_client_client_options_scopes(client_class, transport_class, transport_name): - # Check the case scopes are provided. 
- options = client_options.ClientOptions( - scopes=["1", "2"], - ) - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file=None, - host=client.DEFAULT_ENDPOINT, - scopes=["1", "2"], - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", grpc_helpers), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_transcoder_service_client_client_options_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - -def test_transcoder_service_client_client_options_from_dict(): - with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceGrpcTransport.__init__') as grpc_transport: - grpc_transport.return_value = None - client = TranscoderServiceClient( - client_options={'api_endpoint': 'squid.clam.whelk'} - ) - grpc_transport.assert_called_once_with( - credentials=None, - credentials_file=None, - host="squid.clam.whelk", - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - -@pytest.mark.parametrize("client_class,transport_class,transport_name,grpc_helpers", [ - (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport, "grpc", grpc_helpers), - (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport, "grpc_asyncio", grpc_helpers_async), -]) -def test_transcoder_service_client_create_channel_credentials_file(client_class, transport_class, transport_name, grpc_helpers): - # Check the case credentials file is provided. - options = client_options.ClientOptions( - credentials_file="credentials.json" - ) - - with mock.patch.object(transport_class, '__init__') as patched: - patched.return_value = None - client = client_class(client_options=options, transport=transport_name) - patched.assert_called_once_with( - credentials=None, - credentials_file="credentials.json", - host=client.DEFAULT_ENDPOINT, - scopes=None, - client_cert_source_for_mtls=None, - quota_project_id=None, - client_info=transports.base.DEFAULT_CLIENT_INFO, - always_use_jwt_access=True, - api_audience=None, - ) - - # test that the credentials from file are saved and used as the credentials. 
- with mock.patch.object( - google.auth, "load_credentials_from_file", autospec=True - ) as load_creds, mock.patch.object( - google.auth, "default", autospec=True - ) as adc, mock.patch.object( - grpc_helpers, "create_channel" - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - file_creds = ga_credentials.AnonymousCredentials() - load_creds.return_value = (file_creds, None) - adc.return_value = (creds, None) - client = client_class(client_options=options, transport=transport_name) - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=file_creds, - credentials_file=None, - quota_project_id=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=None, - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("request_type", [ - services.CreateJobRequest, - dict, -]) -def test_create_job(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -def test_create_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - client.create_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - -@pytest.mark.asyncio -async def test_create_job_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - )) - response = await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_create_job_async_from_dict(): - await test_create_job_async(request_type=dict) - - -def test_create_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = resources.Job() - client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.create_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. 
- client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job - mock_val = resources.Job(name='name_value') - assert arg == mock_val - - -def test_create_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - -@pytest.mark.asyncio -async def test_create_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job( - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job - mock_val = resources.Job(name='name_value') - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.create_job( - services.CreateJobRequest(), - parent='parent_value', - job=resources.Job(name='name_value'), - ) - - -@pytest.mark.parametrize("request_type", [ - services.ListJobsRequest, - dict, -]) -def test_list_jobs(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobsPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_jobs_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - client.list_jobs() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - -@pytest.mark.asyncio -async def test_list_jobs_async(transport: str = 'grpc_asyncio', request_type=services.ListJobsRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobsRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobsAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_jobs_async_from_dict(): - await test_list_jobs_async(request_type=dict) - - -def test_list_jobs_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = services.ListJobsResponse() - client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_jobs_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobsRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - await client.list_jobs(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_jobs_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_jobs_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_jobs_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobsResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobsResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_jobs( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_jobs_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_jobs( - services.ListJobsRequest(), - parent='parent_value', - ) - - -def test_list_jobs_pager(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. 
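The field-header tests above verify the implicit routing metadata: the request field that names the parent resource is echoed into the x-goog-request-params header so the service can route the call. A compact sketch of the same check, reusing the tests' own placeholder value:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1.services.transcoder_service import (
    TranscoderServiceClient,
)
from google.cloud.video.transcoder_v1.types import services

client = TranscoderServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
)
request = services.ListJobsRequest(parent='parent_value')

with mock.patch.object(
        type(client.transport.list_jobs), '__call__') as call:
    call.return_value = services.ListJobsResponse()
    client.list_jobs(request=request)

# The routing header travels in the metadata kwarg of the stub call.
_, _, kwargs = call.mock_calls[0]
assert ('x-goog-request-params', 'parent=parent_value') in kwargs['metadata']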
- call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_jobs(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.Job) - for i in results) -def test_list_jobs_pages(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = list(client.list_jobs(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_jobs_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_jobs(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.Job) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_jobs_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_jobs), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - resources.Job(), - ], - next_page_token='abc', - ), - services.ListJobsResponse( - jobs=[], - next_page_token='def', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - ], - next_page_token='ghi', - ), - services.ListJobsResponse( - jobs=[ - resources.Job(), - resources.Job(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_jobs(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - services.GetJobRequest, - dict, -]) -def test_get_job(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - template_id='template_id_value', - ) - response = client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -def test_get_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - client.get_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - -@pytest.mark.asyncio -async def test_get_job_async(transport: str = 'grpc_asyncio', request_type=services.GetJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
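test_list_jobs_pager and test_list_jobs_pages above feed the stub a sequence of pages; iterating the returned ListJobsPager transparently fetches the next page whenever next_page_token is non-empty. A trimmed-down sketch of that behaviour, under the same mocked-transport assumptions:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1.services.transcoder_service import (
    TranscoderServiceClient,
)
from google.cloud.video.transcoder_v1.types import resources, services

client = TranscoderServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
)

with mock.patch.object(
        type(client.transport.list_jobs), '__call__') as call:
    call.side_effect = (
        services.ListJobsResponse(
            jobs=[resources.Job(), resources.Job()],
            next_page_token='abc',
        ),
        services.ListJobsResponse(jobs=[resources.Job()]),  # final page
    )
    pager = client.list_jobs(request={})
    jobs = list(pager)            # walking the pager pulls both pages
    assert len(jobs) == 3
    assert call.call_count == 2   # one underlying RPC per page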
- call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.Job( - name='name_value', - input_uri='input_uri_value', - output_uri='output_uri_value', - state=resources.Job.ProcessingState.PENDING, - ttl_after_completion_days=2670, - )) - response = await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.Job) - assert response.name == 'name_value' - assert response.input_uri == 'input_uri_value' - assert response.output_uri == 'output_uri_value' - assert response.state == resources.Job.ProcessingState.PENDING - assert response.ttl_after_completion_days == 2670 - - -@pytest.mark.asyncio -async def test_get_job_async_from_dict(): - await test_get_job_async(request_type=dict) - - -def test_get_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = resources.Job() - client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - await client.get_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.get_job( - services.GetJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.Job() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job( - services.GetJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - services.DeleteJobRequest, - dict, -]) -def test_delete_job(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. 
- with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - client.delete_job() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - -@pytest.mark.asyncio -async def test_delete_job_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_async_from_dict(): - await test_delete_job_async(request_type=dict) - - -def test_delete_job_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = None - client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. 
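The delete_job tests above rely on DeleteJob carrying no payload on success (google.protobuf.Empty on the wire), which the client surfaces as None. A minimal sketch with a placeholder job name:

from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1.services.transcoder_service import (
    TranscoderServiceClient,
)

client = TranscoderServiceClient(
    credentials=ga_credentials.AnonymousCredentials(),
)

with mock.patch.object(
        type(client.transport.delete_job), '__call__') as call:
    call.return_value = None  # the transport hands back Empty / None
    result = client.delete_job(
        name='projects/my-project/locations/us-central1/jobs/my-job')

assert result is None  # success is signalled by the absence of a payload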
- call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_job( - services.DeleteJobRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - services.CreateJobTemplateRequest, - dict, -]) -def test_create_job_template(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_create_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. 
- client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - client.create_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - -@pytest.mark.asyncio -async def test_create_job_template_async(transport: str = 'grpc_asyncio', request_type=services.CreateJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.CreateJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_create_job_template_async_from_dict(): - await test_create_job_template_async(request_type=dict) - - -def test_create_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_create_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.CreateJobTemplateRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.create_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. 
- _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_create_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_template - mock_val = resources.JobTemplate(name='name_value') - assert arg == mock_val - arg = args[0].job_template_id - mock_val = 'job_template_id_value' - assert arg == mock_val - - -def test_create_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - -@pytest.mark.asyncio -async def test_create_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.create_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.create_job_template( - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - arg = args[0].job_template - mock_val = resources.JobTemplate(name='name_value') - assert arg == mock_val - arg = args[0].job_template_id - mock_val = 'job_template_id_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_create_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
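The flattened create_job_template tests above show the three keyword fields the helper accepts; they map one-to-one onto CreateJobTemplateRequest, with job_template_id acting as the identifier of the new template. A brief sketch with placeholder values:

from google.cloud.video.transcoder_v1.types import resources, services

# What client.create_job_template(parent=..., job_template=...,
# job_template_id=...) assembles under the hood:
request = services.CreateJobTemplateRequest(
    parent='projects/my-project/locations/us-central1',
    job_template=resources.JobTemplate(),
    job_template_id='my-template',
)
assert request.job_template_id == 'my-template'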
- with pytest.raises(ValueError): - await client.create_job_template( - services.CreateJobTemplateRequest(), - parent='parent_value', - job_template=resources.JobTemplate(name='name_value'), - job_template_id='job_template_id_value', - ) - - -@pytest.mark.parametrize("request_type", [ - services.ListJobTemplatesRequest, - dict, -]) -def test_list_job_templates(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - ) - response = client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, pagers.ListJobTemplatesPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -def test_list_job_templates_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - client.list_job_templates() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - -@pytest.mark.asyncio -async def test_list_job_templates_async(transport: str = 'grpc_asyncio', request_type=services.ListJobTemplatesRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse( - next_page_token='next_page_token_value', - unreachable=['unreachable_value'], - )) - response = await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.ListJobTemplatesRequest() - - # Establish that the response is the type that we expect. 
- assert isinstance(response, pagers.ListJobTemplatesAsyncPager) - assert response.next_page_token == 'next_page_token_value' - assert response.unreachable == ['unreachable_value'] - - -@pytest.mark.asyncio -async def test_list_job_templates_async_from_dict(): - await test_list_job_templates_async(request_type=dict) - - -def test_list_job_templates_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = services.ListJobTemplatesResponse() - client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_list_job_templates_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.ListJobTemplatesRequest() - - request.parent = 'parent_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - await client.list_job_templates(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'parent=parent_value', - ) in kw['metadata'] - - -def test_list_job_templates_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - - -def test_list_job_templates_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = services.ListJobTemplatesResponse() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(services.ListJobTemplatesResponse()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.list_job_templates( - parent='parent_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].parent - mock_val = 'parent_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_list_job_templates_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.list_job_templates( - services.ListJobTemplatesRequest(), - parent='parent_value', - ) - - -def test_list_job_templates_pager(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - - metadata = () - metadata = tuple(metadata) + ( - gapic_v1.routing_header.to_grpc_metadata(( - ('parent', ''), - )), - ) - pager = client.list_job_templates(request={}) - - assert pager._metadata == metadata - - results = list(pager) - assert len(results) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in results) -def test_list_job_templates_pages(transport_name: str = "grpc"): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, - transport=transport_name, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__') as call: - # Set the response to a series of pages. 
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = list(client.list_job_templates(request={}).pages) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.asyncio -async def test_list_job_templates_async_pager(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. - call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - async_pager = await client.list_job_templates(request={},) - assert async_pager.next_page_token == 'abc' - responses = [] - async for response in async_pager: # pragma: no branch - responses.append(response) - - assert len(responses) == 6 - assert all(isinstance(i, resources.JobTemplate) - for i in responses) - - -@pytest.mark.asyncio -async def test_list_job_templates_async_pages(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials, - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.list_job_templates), - '__call__', new_callable=mock.AsyncMock) as call: - # Set the response to a series of pages. 
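The async pager tests above use mock.AsyncMock plus a side_effect sequence so each awaited stub call produces the next page; awaiting list_job_templates yields a ListJobTemplatesAsyncPager that is consumed with async for. A condensed sketch, assuming the same installed packages and placeholder data:

import asyncio
from unittest import mock

from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1.services.transcoder_service import (
    TranscoderServiceAsyncClient,
)
from google.cloud.video.transcoder_v1.types import resources, services


async def main():
    client = TranscoderServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with mock.patch.object(
            type(client.transport.list_job_templates), '__call__',
            new_callable=mock.AsyncMock) as call:
        call.side_effect = (
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate()],
                next_page_token='abc',
            ),
            services.ListJobTemplatesResponse(
                job_templates=[resources.JobTemplate()],  # final page
            ),
        )
        pager = await client.list_job_templates(request={})
        templates = [t async for t in pager]
    assert len(templates) == 2


asyncio.run(main())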
- call.side_effect = ( - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - resources.JobTemplate(), - ], - next_page_token='abc', - ), - services.ListJobTemplatesResponse( - job_templates=[], - next_page_token='def', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - ], - next_page_token='ghi', - ), - services.ListJobTemplatesResponse( - job_templates=[ - resources.JobTemplate(), - resources.JobTemplate(), - ], - ), - RuntimeError, - ) - pages = [] - async for page_ in (await client.list_job_templates(request={})).pages: # pragma: no branch - pages.append(page_) - for page_, token in zip(pages, ['abc','def','ghi', '']): - assert page_.raw_page.next_page_token == token - -@pytest.mark.parametrize("request_type", [ - services.GetJobTemplateRequest, - dict, -]) -def test_get_job_template(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate( - name='name_value', - ) - response = client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -def test_get_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - client.get_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - -@pytest.mark.asyncio -async def test_get_job_template_async(transport: str = 'grpc_asyncio', request_type=services.GetJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate( - name='name_value', - )) - response = await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. 
- assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.GetJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert isinstance(response, resources.JobTemplate) - assert response.name == 'name_value' - - -@pytest.mark.asyncio -async def test_get_job_template_async_from_dict(): - await test_get_job_template_async(request_type=dict) - - -def test_get_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = resources.JobTemplate() - client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_get_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.GetJobTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - await client.get_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_get_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_get_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. 
- with pytest.raises(ValueError): - client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_get_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.get_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate() - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.JobTemplate()) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.get_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_get_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.get_job_template( - services.GetJobTemplateRequest(), - name='name_value', - ) - - -@pytest.mark.parametrize("request_type", [ - services.DeleteJobTemplateRequest, - dict, -]) -def test_delete_job_template(request_type, transport: str = 'grpc'): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. - request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - response = client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -def test_delete_job_template_empty_call(): - # This test is a coverage failsafe to make sure that totally empty calls, - # i.e. request == None and no flattened fields passed, work. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport='grpc', - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - client.delete_job_template() - call.assert_called() - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - -@pytest.mark.asyncio -async def test_delete_job_template_async(transport: str = 'grpc_asyncio', request_type=services.DeleteJobTemplateRequest): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # Everything is optional in proto3 as far as the runtime is concerned, - # and we are mocking out the actual API, so just send an empty request. 
- request = request_type() - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - response = await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == services.DeleteJobTemplateRequest() - - # Establish that the response is the type that we expect. - assert response is None - - -@pytest.mark.asyncio -async def test_delete_job_template_async_from_dict(): - await test_delete_job_template_async(request_type=dict) - - -def test_delete_job_template_field_headers(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = None - client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -@pytest.mark.asyncio -async def test_delete_job_template_field_headers_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Any value that is part of the HTTP/1.1 URI should be sent as - # a field header. Set these to a non-empty value. - request = services.DeleteJobTemplateRequest() - - request.name = 'name_value' - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - await client.delete_job_template(request) - - # Establish that the underlying gRPC stub method was called. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - assert args[0] == request - - # Establish that the field header was sent. - _, _, kw = call.mock_calls[0] - assert ( - 'x-goog-request-params', - 'name=name_value', - ) in kw['metadata'] - - -def test_delete_job_template_flattened(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. 
- assert len(call.mock_calls) == 1 - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - - -def test_delete_job_template_flattened_error(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Mock the actual call within the gRPC stub, and fake the request. - with mock.patch.object( - type(client.transport.delete_job_template), - '__call__') as call: - # Designate an appropriate return value for the call. - call.return_value = None - - call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) - # Call the method with a truthy value for each flattened field, - # using the keyword arguments to the method. - response = await client.delete_job_template( - name='name_value', - ) - - # Establish that the underlying call was made with the expected - # request object values. - assert len(call.mock_calls) - _, args, _ = call.mock_calls[0] - arg = args[0].name - mock_val = 'name_value' - assert arg == mock_val - -@pytest.mark.asyncio -async def test_delete_job_template_flattened_error_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Attempting to call a method with both a request object and flattened - # fields is an error. - with pytest.raises(ValueError): - await client.delete_job_template( - services.DeleteJobTemplateRequest(), - name='name_value', - ) - - -def test_credentials_transport_error(): - # It is an error to provide credentials and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport, - ) - - # It is an error to provide a credentials file and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"credentials_file": "credentials.json"}, - transport=transport, - ) - - # It is an error to provide an api_key and a transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - options = client_options.ClientOptions() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options=options, - transport=transport, - ) - - # It is an error to provide an api_key and a credential. - options = mock.Mock() - options.api_key = "api_key" - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options=options, - credentials=ga_credentials.AnonymousCredentials() - ) - - # It is an error to provide scopes and a transport instance. 
- transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - with pytest.raises(ValueError): - client = TranscoderServiceClient( - client_options={"scopes": ["1", "2"]}, - transport=transport, - ) - - -def test_transport_instance(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - client = TranscoderServiceClient(transport=transport) - assert client.transport is transport - -def test_transport_get_channel(): - # A client may be instantiated with a custom transport instance. - transport = transports.TranscoderServiceGrpcTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - - transport = transports.TranscoderServiceGrpcAsyncIOTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - channel = transport.grpc_channel - assert channel - -@pytest.mark.parametrize("transport_class", [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, -]) -def test_transport_adc(transport_class): - # Test default credentials are used if not provided. - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class() - adc.assert_called_once() - -@pytest.mark.parametrize("transport_name", [ - "grpc", -]) -def test_transport_kind(transport_name): - transport = TranscoderServiceClient.get_transport_class(transport_name)( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert transport.kind == transport_name - -def test_transport_grpc_default(): - # A client should use the gRPC transport by default. - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - ) - assert isinstance( - client.transport, - transports.TranscoderServiceGrpcTransport, - ) - -def test_transcoder_service_base_transport_error(): - # Passing both a credentials object and credentials_file should raise an error - with pytest.raises(core_exceptions.DuplicateCredentialArgs): - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - credentials_file="credentials.json" - ) - - -def test_transcoder_service_base_transport(): - # Instantiate the base transport. - with mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport.__init__') as Transport: - Transport.return_value = None - transport = transports.TranscoderServiceTransport( - credentials=ga_credentials.AnonymousCredentials(), - ) - - # Every method on the transport should just blindly - # raise NotImplementedError. 
- methods = ( - 'create_job', - 'list_jobs', - 'get_job', - 'delete_job', - 'create_job_template', - 'list_job_templates', - 'get_job_template', - 'delete_job_template', - ) - for method in methods: - with pytest.raises(NotImplementedError): - getattr(transport, method)(request=object()) - - with pytest.raises(NotImplementedError): - transport.close() - - # Catch all for all remaining methods and properties - remainder = [ - 'kind', - ] - for r in remainder: - with pytest.raises(NotImplementedError): - getattr(transport, r)() - - -def test_transcoder_service_base_transport_with_credentials_file(): - # Instantiate the base transport with a credentials file - with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", - quota_project_id="octopus", - ) - load_creds.assert_called_once_with("credentials.json", - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id="octopus", - ) - - -def test_transcoder_service_base_transport_with_adc(): - # Test the default credentials are used if credentials and credentials_file are None. - with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.cloud.video.transcoder_v1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages') as Transport: - Transport.return_value = None - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport = transports.TranscoderServiceTransport() - adc.assert_called_once() - - -def test_transcoder_service_auth_adc(): - # If no credentials are provided, we should use ADC credentials. - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - TranscoderServiceClient() - adc.assert_called_once_with( - scopes=None, - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - quota_project_id=None, - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_transport_auth_adc(transport_class): - # If credentials and host are not provided, the transport class should use - # ADC credentials. 
- with mock.patch.object(google.auth, 'default', autospec=True) as adc: - adc.return_value = (ga_credentials.AnonymousCredentials(), None) - transport_class(quota_project_id="octopus", scopes=["1", "2"]) - adc.assert_called_once_with( - scopes=["1", "2"], - default_scopes=( 'https://www.googleapis.com/auth/cloud-platform',), - quota_project_id="octopus", - ) - - -@pytest.mark.parametrize( - "transport_class", - [ - transports.TranscoderServiceGrpcTransport, - transports.TranscoderServiceGrpcAsyncIOTransport, - ], -) -def test_transcoder_service_transport_auth_gdch_credentials(transport_class): - host = 'https://language.com' - api_audience_tests = [None, 'https://language2.com'] - api_audience_expect = [host, 'https://language2.com'] - for t, e in zip(api_audience_tests, api_audience_expect): - with mock.patch.object(google.auth, 'default', autospec=True) as adc: - gdch_mock = mock.MagicMock() - type(gdch_mock).with_gdch_audience = mock.PropertyMock(return_value=gdch_mock) - adc.return_value = (gdch_mock, None) - transport_class(host=host, api_audience=t) - gdch_mock.with_gdch_audience.assert_called_once_with( - e - ) - - -@pytest.mark.parametrize( - "transport_class,grpc_helpers", - [ - (transports.TranscoderServiceGrpcTransport, grpc_helpers), - (transports.TranscoderServiceGrpcAsyncIOTransport, grpc_helpers_async) - ], -) -def test_transcoder_service_transport_create_channel(transport_class, grpc_helpers): - # If credentials and host are not provided, the transport class should use - # ADC credentials. - with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object( - grpc_helpers, "create_channel", autospec=True - ) as create_channel: - creds = ga_credentials.AnonymousCredentials() - adc.return_value = (creds, None) - transport_class( - quota_project_id="octopus", - scopes=["1", "2"] - ) - - create_channel.assert_called_with( - "transcoder.googleapis.com:443", - credentials=creds, - credentials_file=None, - quota_project_id="octopus", - default_scopes=( - 'https://www.googleapis.com/auth/cloud-platform', -), - scopes=["1", "2"], - default_host="transcoder.googleapis.com", - ssl_credentials=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_grpc_transport_client_cert_source_for_mtls( - transport_class -): - cred = ga_credentials.AnonymousCredentials() - - # Check ssl_channel_credentials is used if provided. - with mock.patch.object(transport_class, "create_channel") as mock_create_channel: - mock_ssl_channel_creds = mock.Mock() - transport_class( - host="squid.clam.whelk", - credentials=cred, - ssl_channel_credentials=mock_ssl_channel_creds - ) - mock_create_channel.assert_called_once_with( - "squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_channel_creds, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - - # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls - # is used. 
- with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()): - with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred: - transport_class( - credentials=cred, - client_cert_source_for_mtls=client_cert_source_callback - ) - expected_cert, expected_key = client_cert_source_callback() - mock_ssl_cred.assert_called_once_with( - certificate_chain=expected_cert, - private_key=expected_key - ) - - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_transcoder_service_host_no_port(transport_name): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com'), - transport=transport_name, - ) - assert client.transport._host == ( - 'transcoder.googleapis.com:443' - ) - -@pytest.mark.parametrize("transport_name", [ - "grpc", - "grpc_asyncio", -]) -def test_transcoder_service_host_with_port(transport_name): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_options=client_options.ClientOptions(api_endpoint='transcoder.googleapis.com:8000'), - transport=transport_name, - ) - assert client.transport._host == ( - 'transcoder.googleapis.com:8000' - ) - -def test_transcoder_service_grpc_transport_channel(): - channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TranscoderServiceGrpcTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -def test_transcoder_service_grpc_asyncio_transport_channel(): - channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials()) - - # Check that channel is used if provided. - transport = transports.TranscoderServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", - channel=channel, - ) - assert transport.grpc_channel == channel - assert transport._host == "squid.clam.whelk:443" - assert transport._ssl_channel_credentials == None - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. 
-@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_client_cert_source( - transport_class -): - with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred: - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_ssl_cred = mock.Mock() - grpc_ssl_channel_cred.return_value = mock_ssl_cred - - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - - cred = ga_credentials.AnonymousCredentials() - with pytest.warns(DeprecationWarning): - with mock.patch.object(google.auth, 'default') as adc: - adc.return_value = (cred, None) - transport = transport_class( - host="squid.clam.whelk", - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=client_cert_source_callback, - ) - adc.assert_called_once() - - grpc_ssl_channel_cred.assert_called_once_with( - certificate_chain=b"cert bytes", private_key=b"key bytes" - ) - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - assert transport._ssl_channel_credentials == mock_ssl_cred - - -# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are -# removed from grpc/grpc_asyncio transport constructor. -@pytest.mark.parametrize("transport_class", [transports.TranscoderServiceGrpcTransport, transports.TranscoderServiceGrpcAsyncIOTransport]) -def test_transcoder_service_transport_channel_mtls_with_adc( - transport_class -): - mock_ssl_cred = mock.Mock() - with mock.patch.multiple( - "google.auth.transport.grpc.SslCredentials", - __init__=mock.Mock(return_value=None), - ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred), - ): - with mock.patch.object(transport_class, "create_channel") as grpc_create_channel: - mock_grpc_channel = mock.Mock() - grpc_create_channel.return_value = mock_grpc_channel - mock_cred = mock.Mock() - - with pytest.warns(DeprecationWarning): - transport = transport_class( - host="squid.clam.whelk", - credentials=mock_cred, - api_mtls_endpoint="mtls.squid.clam.whelk", - client_cert_source=None, - ) - - grpc_create_channel.assert_called_once_with( - "mtls.squid.clam.whelk:443", - credentials=mock_cred, - credentials_file=None, - scopes=None, - ssl_credentials=mock_ssl_cred, - quota_project_id=None, - options=[ - ("grpc.max_send_message_length", -1), - ("grpc.max_receive_message_length", -1), - ], - ) - assert transport.grpc_channel == mock_grpc_channel - - -def test_job_path(): - project = "squid" - location = "clam" - job = "whelk" - expected = "projects/{project}/locations/{location}/jobs/{job}".format(project=project, location=location, job=job, ) - actual = TranscoderServiceClient.job_path(project, location, job) - assert expected == actual - - -def test_parse_job_path(): - expected = { - "project": "octopus", - "location": "oyster", - "job": "nudibranch", - } - path = TranscoderServiceClient.job_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_job_path(path) - assert expected == actual - -def test_job_template_path(): - project = "cuttlefish" - location = "mussel" - job_template = "winkle" - expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(project=project, location=location, job_template=job_template, ) - actual = TranscoderServiceClient.job_template_path(project, location, job_template) - assert expected == actual - - -def test_parse_job_template_path(): - expected = { - "project": "nautilus", - "location": "scallop", - "job_template": "abalone", - } - path = TranscoderServiceClient.job_template_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_job_template_path(path) - assert expected == actual - -def test_common_billing_account_path(): - billing_account = "squid" - expected = "billingAccounts/{billing_account}".format(billing_account=billing_account, ) - actual = TranscoderServiceClient.common_billing_account_path(billing_account) - assert expected == actual - - -def test_parse_common_billing_account_path(): - expected = { - "billing_account": "clam", - } - path = TranscoderServiceClient.common_billing_account_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_billing_account_path(path) - assert expected == actual - -def test_common_folder_path(): - folder = "whelk" - expected = "folders/{folder}".format(folder=folder, ) - actual = TranscoderServiceClient.common_folder_path(folder) - assert expected == actual - - -def test_parse_common_folder_path(): - expected = { - "folder": "octopus", - } - path = TranscoderServiceClient.common_folder_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_folder_path(path) - assert expected == actual - -def test_common_organization_path(): - organization = "oyster" - expected = "organizations/{organization}".format(organization=organization, ) - actual = TranscoderServiceClient.common_organization_path(organization) - assert expected == actual - - -def test_parse_common_organization_path(): - expected = { - "organization": "nudibranch", - } - path = TranscoderServiceClient.common_organization_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_organization_path(path) - assert expected == actual - -def test_common_project_path(): - project = "cuttlefish" - expected = "projects/{project}".format(project=project, ) - actual = TranscoderServiceClient.common_project_path(project) - assert expected == actual - - -def test_parse_common_project_path(): - expected = { - "project": "mussel", - } - path = TranscoderServiceClient.common_project_path(**expected) - - # Check that the path construction is reversible. - actual = TranscoderServiceClient.parse_common_project_path(path) - assert expected == actual - -def test_common_location_path(): - project = "winkle" - location = "nautilus" - expected = "projects/{project}/locations/{location}".format(project=project, location=location, ) - actual = TranscoderServiceClient.common_location_path(project, location) - assert expected == actual - - -def test_parse_common_location_path(): - expected = { - "project": "scallop", - "location": "abalone", - } - path = TranscoderServiceClient.common_location_path(**expected) - - # Check that the path construction is reversible. 
- actual = TranscoderServiceClient.parse_common_location_path(path) - assert expected == actual - - -def test_client_with_default_client_info(): - client_info = gapic_v1.client_info.ClientInfo() - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - - with mock.patch.object(transports.TranscoderServiceTransport, '_prep_wrapped_messages') as prep: - transport_class = TranscoderServiceClient.get_transport_class() - transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), - client_info=client_info, - ) - prep.assert_called_once_with(client_info) - -@pytest.mark.asyncio -async def test_transport_close_async(): - client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), - transport="grpc_asyncio", - ) - with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close: - async with client: - close.assert_not_called() - close.assert_called_once() - - -def test_transport_close(): - transports = { - "grpc": "_grpc_channel", - } - - for transport, close_name in transports.items(): - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close: - with client: - close.assert_not_called() - close.assert_called_once() - -def test_client_ctx(): - transports = [ - 'grpc', - ] - for transport in transports: - client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), - transport=transport - ) - # Test client calls underlying transport. 
-        with mock.patch.object(type(client.transport), "close") as close:
-            close.assert_not_called()
-            with client:
-                pass
-            close.assert_called()
-
-@pytest.mark.parametrize("client_class,transport_class", [
-    (TranscoderServiceClient, transports.TranscoderServiceGrpcTransport),
-    (TranscoderServiceAsyncClient, transports.TranscoderServiceGrpcAsyncIOTransport),
-])
-def test_api_key_credentials(client_class, transport_class):
-    with mock.patch.object(
-        google.auth._default, "get_api_key_credentials", create=True
-    ) as get_api_key_credentials:
-        mock_cred = mock.Mock()
-        get_api_key_credentials.return_value = mock_cred
-        options = client_options.ClientOptions()
-        options.api_key = "api_key"
-        with mock.patch.object(transport_class, "__init__") as patched:
-            patched.return_value = None
-            client = client_class(client_options=options)
-            patched.assert_called_once_with(
-                credentials=mock_cred,
-                credentials_file=None,
-                host=client.DEFAULT_ENDPOINT,
-                scopes=None,
-                client_cert_source_for_mtls=None,
-                quota_project_id=None,
-                client_info=transports.base.DEFAULT_CLIENT_INFO,
-                always_use_jwt_access=True,
-                api_audience=None,
-            )
diff --git a/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json b/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json
index 0e60a70..2c7572e 100644
--- a/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json
+++ b/samples/generated_samples/snippet_metadata_google.cloud.video.transcoder.v1.json
@@ -8,7 +8,7 @@
     ],
     "language": "PYTHON",
     "name": "google-cloud-video-transcoder",
-    "version": "1.5.0"
+    "version": "0.1.0"
   },
   "snippets": [
     {
diff --git a/setup.py b/setup.py
index 0bcfe47..a2cc3ef 100644
--- a/setup.py
+++ b/setup.py
@@ -40,6 +40,7 @@
 dependencies = [
     "google-api-core[grpc] >= 1.34.0, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.*,!=2.4.*,!=2.5.*,!=2.6.*,!=2.7.*,!=2.8.*,!=2.9.*,!=2.10.*",
     "proto-plus >= 1.22.0, <2.0.0dev",
+    "proto-plus >= 1.22.2, <2.0.0dev; python_version>='3.11'",
     "protobuf>=3.19.5,<5.0.0dev,!=3.20.0,!=3.20.1,!=4.21.0,!=4.21.1,!=4.21.2,!=4.21.3,!=4.21.4,!=4.21.5",
 ]
 url = "https://github.com/googleapis/python-video-transcoder"
@@ -79,6 +80,7 @@
         "Programming Language :: Python :: 3.8",
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
+        "Programming Language :: Python :: 3.11",
         "Operating System :: OS Independent",
         "Topic :: Internet",
     ],
diff --git a/owl-bot-staging/v1/testing/constraints-3.12.txt b/testing/constraints-3.12.txt
similarity index 100%
rename from owl-bot-staging/v1/testing/constraints-3.12.txt
rename to testing/constraints-3.12.txt
