diff --git a/.flake8 b/.flake8 index 29227d4..2e43874 100644 --- a/.flake8 +++ b/.flake8 @@ -16,7 +16,7 @@ # Generated by synthtool. DO NOT EDIT! [flake8] -ignore = E203, E266, E501, W503 +ignore = E203, E231, E266, E501, W503 exclude = # Exclude generated code. **/proto/** diff --git a/.github/.OwlBot.lock.yaml b/.github/.OwlBot.lock.yaml index 44c78f7..757c9dc 100644 --- a/.github/.OwlBot.lock.yaml +++ b/.github/.OwlBot.lock.yaml @@ -13,4 +13,5 @@ # limitations under the License. docker: image: gcr.io/cloud-devrel-public-resources/owlbot-python:latest - digest: sha256:4e1991042fe54b991db9ca17c8fb386e61b22fe4d1472a568bf0fcac85dcf5d3 + digest: sha256:81ed5ecdfc7cac5b699ba4537376f3563f6f04122c4ec9e735d3b3dc1d43dd32 +# created: 2022-05-05T22:08:23.383410683Z diff --git a/.github/auto-approve.yml b/.github/auto-approve.yml new file mode 100644 index 0000000..311ebbb --- /dev/null +++ b/.github/auto-approve.yml @@ -0,0 +1,3 @@ +# https://github.com/googleapis/repo-automation-bots/tree/main/packages/auto-approve +processes: + - "OwlBotTemplateChanges" diff --git a/.github/auto-label.yaml b/.github/auto-label.yaml new file mode 100644 index 0000000..41bff0b --- /dev/null +++ b/.github/auto-label.yaml @@ -0,0 +1,15 @@ +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+requestsize: + enabled: true diff --git a/.github/release-please.yml b/.github/release-please.yml index 466597e..6def37a 100644 --- a/.github/release-please.yml +++ b/.github/release-please.yml @@ -1,2 +1,8 @@ releaseType: python handleGHRelease: true +# NOTE: this section is generated by synthtool.languages.python +# See https://github.com/googleapis/synthtool/blob/master/synthtool/languages/python.py +branches: +- branch: v0 + handleGHRelease: true + releaseType: python diff --git a/.kokoro/docker/docs/Dockerfile b/.kokoro/docker/docs/Dockerfile index 4e1b1fb..238b87b 100644 --- a/.kokoro/docker/docs/Dockerfile +++ b/.kokoro/docker/docs/Dockerfile @@ -12,7 +12,7 @@ # See the License for the specific language governing permissions and # limitations under the License. -from ubuntu:20.04 +from ubuntu:22.04 ENV DEBIAN_FRONTEND noninteractive @@ -60,8 +60,24 @@ RUN apt-get update \ && rm -rf /var/lib/apt/lists/* \ && rm -f /var/cache/apt/archives/*.deb +###################### Install python 3.8.11 + +# Download python 3.8.11 +RUN wget https://www.python.org/ftp/python/3.8.11/Python-3.8.11.tgz + +# Extract files +RUN tar -xvf Python-3.8.11.tgz + +# Install python 3.8.11 +RUN ./Python-3.8.11/configure --enable-optimizations +RUN make altinstall + +###################### Install pip RUN wget -O /tmp/get-pip.py 'https://bootstrap.pypa.io/get-pip.py' \ - && python3.8 /tmp/get-pip.py \ + && python3 /tmp/get-pip.py \ && rm /tmp/get-pip.py +# Test pip +RUN python3 -m pip + CMD ["python3.8"] diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 62eb5a7..46d2371 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -22,7 +22,7 @@ repos: - id: end-of-file-fixer - id: check-yaml - repo: https://github.com/psf/black - rev: 19.10b0 + rev: 22.3.0 hooks: - id: black - repo: https://gitlab.com/pycqa/flake8 diff --git a/CHANGELOG.md b/CHANGELOG.md index 486e8ab..a1f87d3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,18 @@ # Changelog -### 
[1.3.1](https://github.com/googleapis/python-video-transcoder/compare/v1.3.0...v1.3.1) (2022-03-05) +## [1.3.2](https://github.com/googleapis/python-video-transcoder/compare/v1.3.1...v1.3.2) (2022-06-03) + + +### Bug Fixes + +* **deps:** require protobuf <4.0.0dev ([#195](https://github.com/googleapis/python-video-transcoder/issues/195)) ([8d9c679](https://github.com/googleapis/python-video-transcoder/commit/8d9c679a2f0b460c1f34456ab2e46437c4cbdc16)) + + +### Documentation + +* fix changelog header to consistent size ([#196](https://github.com/googleapis/python-video-transcoder/issues/196)) ([04ee631](https://github.com/googleapis/python-video-transcoder/commit/04ee631bb65cd5bb0121f7dcfce9b78e67d19198)) + +## [1.3.1](https://github.com/googleapis/python-video-transcoder/compare/v1.3.0...v1.3.1) (2022-03-05) ### Bug Fixes @@ -30,7 +42,7 @@ * **samples:** update samples to use mapping_ attribute of AudioStream ([#142](https://github.com/googleapis/python-video-transcoder/issues/142)) ([7fbc619](https://github.com/googleapis/python-video-transcoder/commit/7fbc61917562c269439828df82b474700c95ea23)) * **samples:** add samples and tests for adding captions to a job ([#131](https://github.com/googleapis/python-video-transcoder/issues/131)) ([e30431f](https://github.com/googleapis/python-video-transcoder/commit/e30431fec7c15666afbb5bc975f7077389aac06d)) -### [1.2.1](https://www.github.com/googleapis/python-video-transcoder/compare/v1.2.0...v1.2.1) (2021-11-04) +## [1.2.1](https://www.github.com/googleapis/python-video-transcoder/compare/v1.2.0...v1.2.1) (2021-11-04) ### Bug Fixes @@ -62,7 +74,7 @@ * remove Encryption settings that were published erroneously ([#102](https://www.github.com/googleapis/python-video-transcoder/issues/102)) ([824009a](https://www.github.com/googleapis/python-video-transcoder/commit/824009ac01700341071b50af2741ef6493dcbcf5)) -### [1.0.1](https://www.github.com/googleapis/python-video-transcoder/compare/v1.0.0...v1.0.1) (2021-09-30) +## 
[1.0.1](https://www.github.com/googleapis/python-video-transcoder/compare/v1.0.0...v1.0.1) (2021-09-30) ### Bug Fixes @@ -76,7 +88,7 @@ * bump release level to production/stable ([#79](https://www.github.com/googleapis/python-video-transcoder/issues/79)) ([45ba870](https://www.github.com/googleapis/python-video-transcoder/commit/45ba87048ef73c666c00248c6da3637fd418d70a)) -### [0.5.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.5.0...v0.5.1) (2021-09-24) +## [0.5.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.5.0...v0.5.1) (2021-09-24) ### Bug Fixes @@ -98,7 +110,7 @@ * Indicate v1beta1 deprecation ([d862900](https://www.github.com/googleapis/python-video-transcoder/commit/d86290047e9464e4026c264a6dfea51936b21c2c)) * Update proto documentation ([d862900](https://www.github.com/googleapis/python-video-transcoder/commit/d86290047e9464e4026c264a6dfea51936b21c2c)) -### [0.4.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.4.0...v0.4.1) (2021-07-27) +## [0.4.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.4.0...v0.4.1) (2021-07-27) ### Bug Fixes @@ -134,7 +146,7 @@ * omit mention of Python 2.7 in 'CONTRIBUTING.rst' ([#1127](https://www.github.com/googleapis/python-video-transcoder/issues/1127)) ([#58](https://www.github.com/googleapis/python-video-transcoder/issues/58)) ([1659ce8](https://www.github.com/googleapis/python-video-transcoder/commit/1659ce88ef94139a271be9719a4adaf4e3a600c0)), closes [#1126](https://www.github.com/googleapis/python-video-transcoder/issues/1126) -### [0.3.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.3.0...v0.3.1) (2021-05-28) +## [0.3.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.3.0...v0.3.1) (2021-05-28) ### Bug Fixes @@ -148,7 +160,7 @@ * add `from_service_account_info` ([#32](https://www.github.com/googleapis/python-video-transcoder/issues/32)) 
([4076914](https://www.github.com/googleapis/python-video-transcoder/commit/4076914adfde514417b5a39a0e5fcd905e5f6e8f)) -### [0.2.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.2.0...v0.2.1) (2021-02-12) +## [0.2.1](https://www.github.com/googleapis/python-video-transcoder/compare/v0.2.0...v0.2.1) (2021-02-12) ### Bug Fixes diff --git a/docs/conf.py b/docs/conf.py index 16a74d8..05a11fc 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -24,9 +24,9 @@ # All configuration values have a default; values that are commented out # serve to show the default. -import sys import os import shlex +import sys # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the @@ -361,7 +361,10 @@ intersphinx_mapping = { "python": ("https://python.readthedocs.org/en/latest/", None), "google-auth": ("https://googleapis.dev/python/google-auth/latest/", None), - "google.api_core": ("https://googleapis.dev/python/google-api-core/latest/", None,), + "google.api_core": ( + "https://googleapis.dev/python/google-api-core/latest/", + None, + ), "grpc": ("https://grpc.github.io/grpc/python/", None), "proto-plus": ("https://proto-plus-python.readthedocs.io/en/latest/", None), "protobuf": ("https://googleapis.dev/python/protobuf/latest/", None), diff --git a/google/cloud/video/transcoder/__init__.py b/google/cloud/video/transcoder/__init__.py index 5c74336..39af9a1 100644 --- a/google/cloud/video/transcoder/__init__.py +++ b/google/cloud/video/transcoder/__init__.py @@ -14,41 +14,44 @@ # limitations under the License. 
# +from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import ( + TranscoderServiceAsyncClient, +) from google.cloud.video.transcoder_v1.services.transcoder_service.client import ( TranscoderServiceClient, ) -from google.cloud.video.transcoder_v1.services.transcoder_service.async_client import ( - TranscoderServiceAsyncClient, +from google.cloud.video.transcoder_v1.types.resources import ( + AdBreak, + AudioStream, + EditAtom, + ElementaryStream, + Input, + Job, + JobConfig, + JobTemplate, + Manifest, + MuxStream, + Output, + Overlay, + PreprocessingConfig, + PubsubDestination, + SegmentSettings, + SpriteSheet, + TextStream, + VideoStream, +) +from google.cloud.video.transcoder_v1.types.services import ( + CreateJobRequest, + CreateJobTemplateRequest, + DeleteJobRequest, + DeleteJobTemplateRequest, + GetJobRequest, + GetJobTemplateRequest, + ListJobsRequest, + ListJobsResponse, + ListJobTemplatesRequest, + ListJobTemplatesResponse, ) - -from google.cloud.video.transcoder_v1.types.resources import AdBreak -from google.cloud.video.transcoder_v1.types.resources import AudioStream -from google.cloud.video.transcoder_v1.types.resources import EditAtom -from google.cloud.video.transcoder_v1.types.resources import ElementaryStream -from google.cloud.video.transcoder_v1.types.resources import Input -from google.cloud.video.transcoder_v1.types.resources import Job -from google.cloud.video.transcoder_v1.types.resources import JobConfig -from google.cloud.video.transcoder_v1.types.resources import JobTemplate -from google.cloud.video.transcoder_v1.types.resources import Manifest -from google.cloud.video.transcoder_v1.types.resources import MuxStream -from google.cloud.video.transcoder_v1.types.resources import Output -from google.cloud.video.transcoder_v1.types.resources import Overlay -from google.cloud.video.transcoder_v1.types.resources import PreprocessingConfig -from google.cloud.video.transcoder_v1.types.resources import PubsubDestination 
-from google.cloud.video.transcoder_v1.types.resources import SegmentSettings -from google.cloud.video.transcoder_v1.types.resources import SpriteSheet -from google.cloud.video.transcoder_v1.types.resources import TextStream -from google.cloud.video.transcoder_v1.types.resources import VideoStream -from google.cloud.video.transcoder_v1.types.services import CreateJobRequest -from google.cloud.video.transcoder_v1.types.services import CreateJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobRequest -from google.cloud.video.transcoder_v1.types.services import DeleteJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import GetJobRequest -from google.cloud.video.transcoder_v1.types.services import GetJobTemplateRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsRequest -from google.cloud.video.transcoder_v1.types.services import ListJobsResponse -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesRequest -from google.cloud.video.transcoder_v1.types.services import ListJobTemplatesResponse __all__ = ( "TranscoderServiceClient", diff --git a/google/cloud/video/transcoder_v1/__init__.py b/google/cloud/video/transcoder_v1/__init__.py index 69f350a..34fdac3 100644 --- a/google/cloud/video/transcoder_v1/__init__.py +++ b/google/cloud/video/transcoder_v1/__init__.py @@ -14,37 +14,42 @@ # limitations under the License. 
# -from .services.transcoder_service import TranscoderServiceClient -from .services.transcoder_service import TranscoderServiceAsyncClient - -from .types.resources import AdBreak -from .types.resources import AudioStream -from .types.resources import EditAtom -from .types.resources import ElementaryStream -from .types.resources import Input -from .types.resources import Job -from .types.resources import JobConfig -from .types.resources import JobTemplate -from .types.resources import Manifest -from .types.resources import MuxStream -from .types.resources import Output -from .types.resources import Overlay -from .types.resources import PreprocessingConfig -from .types.resources import PubsubDestination -from .types.resources import SegmentSettings -from .types.resources import SpriteSheet -from .types.resources import TextStream -from .types.resources import VideoStream -from .types.services import CreateJobRequest -from .types.services import CreateJobTemplateRequest -from .types.services import DeleteJobRequest -from .types.services import DeleteJobTemplateRequest -from .types.services import GetJobRequest -from .types.services import GetJobTemplateRequest -from .types.services import ListJobsRequest -from .types.services import ListJobsResponse -from .types.services import ListJobTemplatesRequest -from .types.services import ListJobTemplatesResponse +from .services.transcoder_service import ( + TranscoderServiceAsyncClient, + TranscoderServiceClient, +) +from .types.resources import ( + AdBreak, + AudioStream, + EditAtom, + ElementaryStream, + Input, + Job, + JobConfig, + JobTemplate, + Manifest, + MuxStream, + Output, + Overlay, + PreprocessingConfig, + PubsubDestination, + SegmentSettings, + SpriteSheet, + TextStream, + VideoStream, +) +from .types.services import ( + CreateJobRequest, + CreateJobTemplateRequest, + DeleteJobRequest, + DeleteJobTemplateRequest, + GetJobRequest, + GetJobTemplateRequest, + ListJobsRequest, + ListJobsResponse, + 
ListJobTemplatesRequest, + ListJobTemplatesResponse, +) __all__ = ( "TranscoderServiceAsyncClient", diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py b/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py index e133525..af4fc08 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/__init__.py @@ -13,8 +13,8 @@ # See the License for the specific language governing permissions and # limitations under the License. # -from .client import TranscoderServiceClient from .async_client import TranscoderServiceAsyncClient +from .client import TranscoderServiceClient __all__ = ( "TranscoderServiceClient", diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py index 5af26c3..e7e31d8 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/async_client.py @@ -16,29 +16,30 @@ from collections import OrderedDict import functools import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union -from google.api_core.client_options import ClientOptions from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +from google.api_core.client_options import ClientOptions from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from 
google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO -from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources, services + from .client import TranscoderServiceClient +from .transports.base import DEFAULT_CLIENT_INFO, TranscoderServiceTransport +from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport class TranscoderServiceAsyncClient: @@ -232,9 +233,9 @@ async def create_job( from google.cloud.video import transcoder_v1 - def sample_create_job(): + async def sample_create_job(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) job = transcoder_v1.Job() @@ -246,7 +247,7 @@ def sample_create_job(): ) # Make the request - response = client.create_job(request=request) + response = await client.create_job(request=request) # Handle the response print(response) @@ -313,7 +314,12 @@ def sample_create_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -333,9 +339,9 @@ async def list_jobs( from google.cloud.video import transcoder_v1 - def sample_list_jobs(): + async def sample_list_jobs(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.ListJobsRequest( @@ -346,7 +352,7 @@ def sample_list_jobs(): page_result = client.list_jobs(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -407,12 +413,20 @@ def sample_list_jobs(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListJobsAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -433,9 +447,9 @@ async def get_job( from google.cloud.video import transcoder_v1 - def sample_get_job(): + async def sample_get_job(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.GetJobRequest( @@ -443,7 +457,7 @@ def sample_get_job(): ) # Make the request - response = client.get_job(request=request) + response = await client.get_job(request=request) # Handle the response print(response) @@ -501,7 +515,12 @@ def sample_get_job(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -521,9 +540,9 @@ async def delete_job( from google.cloud.video import transcoder_v1 - def sample_delete_job(): + async def sample_delete_job(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.DeleteJobRequest( @@ -531,7 +550,7 @@ def sample_delete_job(): ) # Make the request - client.delete_job(request=request) + await client.delete_job(request=request) Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobRequest, dict]): @@ -583,7 +602,10 @@ def sample_delete_job(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def create_job_template( @@ -603,9 +625,9 @@ async def create_job_template( from google.cloud.video import transcoder_v1 - def sample_create_job_template(): + async def sample_create_job_template(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.CreateJobTemplateRequest( @@ -614,7 +636,7 @@ def sample_create_job_template(): ) # Make the request - response = client.create_job_template(request=request) + response = await client.create_job_template(request=request) # Handle the response print(response) @@ -696,7 +718,12 @@ def sample_create_job_template(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. 
return response @@ -716,9 +743,9 @@ async def list_job_templates( from google.cloud.video import transcoder_v1 - def sample_list_job_templates(): + async def sample_list_job_templates(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.ListJobTemplatesRequest( @@ -729,7 +756,7 @@ def sample_list_job_templates(): page_result = client.list_job_templates(request=request) # Handle the response - for response in page_result: + async for response in page_result: print(response) Args: @@ -790,12 +817,20 @@ def sample_list_job_templates(): ) # Send the request. - response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListJobTemplatesAsyncPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -816,9 +851,9 @@ async def get_job_template( from google.cloud.video import transcoder_v1 - def sample_get_job_template(): + async def sample_get_job_template(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.GetJobTemplateRequest( @@ -826,7 +861,7 @@ def sample_get_job_template(): ) # Make the request - response = client.get_job_template(request=request) + response = await client.get_job_template(request=request) # Handle the response print(response) @@ -885,7 +920,12 @@ def sample_get_job_template(): ) # Send the request. 
- response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = await rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -905,9 +945,9 @@ async def delete_job_template( from google.cloud.video import transcoder_v1 - def sample_delete_job_template(): + async def sample_delete_job_template(): # Create a client - client = transcoder_v1.TranscoderServiceClient() + client = transcoder_v1.TranscoderServiceAsyncClient() # Initialize request argument(s) request = transcoder_v1.DeleteJobTemplateRequest( @@ -915,7 +955,7 @@ def sample_delete_job_template(): ) # Make the request - client.delete_job_template(request=request) + await client.delete_job_template(request=request) Args: request (Union[google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest, dict]): @@ -967,7 +1007,10 @@ def sample_delete_job_template(): # Send the request. await rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) async def __aenter__(self): diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py index c595163..854f5a2 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/client.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/client.py @@ -16,30 +16,31 @@ from collections import OrderedDict import os import re -from typing import Dict, Optional, Sequence, Tuple, Type, Union -import pkg_resources +from typing import Dict, Mapping, Optional, Sequence, Tuple, Type, Union from google.api_core import client_options as client_options_lib from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries from google.auth import credentials as ga_credentials # type: ignore +from google.auth.exceptions 
import MutualTLSChannelError # type: ignore from google.auth.transport import mtls # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore -from google.auth.exceptions import MutualTLSChannelError # type: ignore from google.oauth2 import service_account # type: ignore +import pkg_resources try: OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault] except AttributeError: # pragma: NO COVER OptionalRetry = Union[retries.Retry, object] # type: ignore -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore -from .transports.base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO + +from google.cloud.video.transcoder_v1.services.transcoder_service import pagers +from google.cloud.video.transcoder_v1.types import resources, services + +from .transports.base import DEFAULT_CLIENT_INFO, TranscoderServiceTransport from .transports.grpc import TranscoderServiceGrpcTransport from .transports.grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport @@ -59,7 +60,8 @@ class TranscoderServiceClientMeta(type): _transport_registry["grpc_asyncio"] = TranscoderServiceGrpcAsyncIOTransport def get_transport_class( - cls, label: str = None, + cls, + label: str = None, ) -> Type[TranscoderServiceTransport]: """Returns an appropriate transport class. 
@@ -172,10 +174,16 @@ def transport(self) -> TranscoderServiceTransport: return self._transport @staticmethod - def job_path(project: str, location: str, job: str,) -> str: + def job_path( + project: str, + location: str, + job: str, + ) -> str: """Returns a fully-qualified job string.""" return "projects/{project}/locations/{location}/jobs/{job}".format( - project=project, location=location, job=job, + project=project, + location=location, + job=job, ) @staticmethod @@ -188,10 +196,16 @@ def parse_job_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def job_template_path(project: str, location: str, job_template: str,) -> str: + def job_template_path( + project: str, + location: str, + job_template: str, + ) -> str: """Returns a fully-qualified job_template string.""" return "projects/{project}/locations/{location}/jobTemplates/{job_template}".format( - project=project, location=location, job_template=job_template, + project=project, + location=location, + job_template=job_template, ) @staticmethod @@ -204,7 +218,9 @@ def parse_job_template_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_billing_account_path(billing_account: str,) -> str: + def common_billing_account_path( + billing_account: str, + ) -> str: """Returns a fully-qualified billing_account string.""" return "billingAccounts/{billing_account}".format( billing_account=billing_account, @@ -217,9 +233,13 @@ def parse_common_billing_account_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_folder_path(folder: str,) -> str: + def common_folder_path( + folder: str, + ) -> str: """Returns a fully-qualified folder string.""" - return "folders/{folder}".format(folder=folder,) + return "folders/{folder}".format( + folder=folder, + ) @staticmethod def parse_common_folder_path(path: str) -> Dict[str, str]: @@ -228,9 +248,13 @@ def parse_common_folder_path(path: str) -> Dict[str, str]: 
return m.groupdict() if m else {} @staticmethod - def common_organization_path(organization: str,) -> str: + def common_organization_path( + organization: str, + ) -> str: """Returns a fully-qualified organization string.""" - return "organizations/{organization}".format(organization=organization,) + return "organizations/{organization}".format( + organization=organization, + ) @staticmethod def parse_common_organization_path(path: str) -> Dict[str, str]: @@ -239,9 +263,13 @@ def parse_common_organization_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_project_path(project: str,) -> str: + def common_project_path( + project: str, + ) -> str: """Returns a fully-qualified project string.""" - return "projects/{project}".format(project=project,) + return "projects/{project}".format( + project=project, + ) @staticmethod def parse_common_project_path(path: str) -> Dict[str, str]: @@ -250,10 +278,14 @@ def parse_common_project_path(path: str) -> Dict[str, str]: return m.groupdict() if m else {} @staticmethod - def common_location_path(project: str, location: str,) -> str: + def common_location_path( + project: str, + location: str, + ) -> str: """Returns a fully-qualified location string.""" return "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) @staticmethod @@ -524,7 +556,12 @@ def sample_create_job(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -618,12 +655,20 @@ def sample_list_jobs(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListJobsPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -712,7 +757,12 @@ def sample_get_job(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -794,7 +844,10 @@ def sample_delete_job(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def create_job_template( @@ -907,7 +960,12 @@ def sample_create_job_template(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1001,12 +1059,20 @@ def sample_list_job_templates(): ) # Send the request. - response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # This method is paged; wrap the response in a pager, which provides # an `__iter__` convenience method. response = pagers.ListJobTemplatesPager( - method=rpc, request=request, response=response, metadata=metadata, + method=rpc, + request=request, + response=response, + metadata=metadata, ) # Done; return the response. @@ -1096,7 +1162,12 @@ def sample_get_job_template(): ) # Send the request. 
- response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,) + response = rpc( + request, + retry=retry, + timeout=timeout, + metadata=metadata, + ) # Done; return the response. return response @@ -1178,7 +1249,10 @@ def sample_delete_job_template(): # Send the request. rpc( - request, retry=retry, timeout=timeout, metadata=metadata, + request, + retry=retry, + timeout=timeout, + metadata=metadata, ) def __enter__(self): diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py b/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py index 8877493..10a728f 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/pagers.py @@ -18,14 +18,13 @@ AsyncIterator, Awaitable, Callable, + Iterator, + Optional, Sequence, Tuple, - Optional, - Iterator, ) -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services +from google.cloud.video.transcoder_v1.types import resources, services class ListJobsPager: diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py index f382a59..cc28a5c 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/__init__.py @@ -20,7 +20,6 @@ from .grpc import TranscoderServiceGrpcTransport from .grpc_asyncio import TranscoderServiceGrpcAsyncIOTransport - # Compile a registry of transports. 
_transport_registry = OrderedDict() # type: Dict[str, Type[TranscoderServiceTransport]] _transport_registry["grpc"] = TranscoderServiceGrpcTransport diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py index 1c24a9f..f30a98d 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/base.py @@ -15,19 +15,18 @@ # import abc from typing import Awaitable, Callable, Dict, Optional, Sequence, Union -import pkg_resources -import google.auth # type: ignore import google.api_core from google.api_core import exceptions as core_exceptions from google.api_core import gapic_v1 from google.api_core import retry as retries +import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore - -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services from google.protobuf import empty_pb2 # type: ignore +import pkg_resources + +from google.cloud.video.transcoder_v1.types import resources, services try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( @@ -82,6 +81,7 @@ def __init__( always_use_jwt_access (Optional[bool]): Whether self signed JWT should be used for service account credentials. """ + # Save the hostname. Default to port 443 (HTTPS) if none is specified. if ":" not in host: host += ":443" @@ -123,37 +123,53 @@ def _prep_wrapped_messages(self, client_info): # Precompute the wrapped methods. 
self._wrapped_methods = { self.create_job: gapic_v1.method.wrap_method( - self.create_job, default_timeout=60.0, client_info=client_info, + self.create_job, + default_timeout=60.0, + client_info=client_info, ), self.list_jobs: gapic_v1.method.wrap_method( - self.list_jobs, default_timeout=60.0, client_info=client_info, + self.list_jobs, + default_timeout=60.0, + client_info=client_info, ), self.get_job: gapic_v1.method.wrap_method( - self.get_job, default_timeout=60.0, client_info=client_info, + self.get_job, + default_timeout=60.0, + client_info=client_info, ), self.delete_job: gapic_v1.method.wrap_method( - self.delete_job, default_timeout=60.0, client_info=client_info, + self.delete_job, + default_timeout=60.0, + client_info=client_info, ), self.create_job_template: gapic_v1.method.wrap_method( - self.create_job_template, default_timeout=60.0, client_info=client_info, + self.create_job_template, + default_timeout=60.0, + client_info=client_info, ), self.list_job_templates: gapic_v1.method.wrap_method( - self.list_job_templates, default_timeout=60.0, client_info=client_info, + self.list_job_templates, + default_timeout=60.0, + client_info=client_info, ), self.get_job_template: gapic_v1.method.wrap_method( - self.get_job_template, default_timeout=60.0, client_info=client_info, + self.get_job_template, + default_timeout=60.0, + client_info=client_info, ), self.delete_job_template: gapic_v1.method.wrap_method( - self.delete_job_template, default_timeout=60.0, client_info=client_info, + self.delete_job_template, + default_timeout=60.0, + client_info=client_info, ), } def close(self): """Closes resources associated with the transport. - .. warning:: - Only call this method if the transport is NOT shared - with other clients - this may cause errors in other clients! + .. warning:: + Only call this method if the transport is NOT shared + with other clients - this may cause errors in other clients! 
""" raise NotImplementedError() @@ -229,5 +245,9 @@ def delete_job_template( ]: raise NotImplementedError() + @property + def kind(self) -> str: + raise NotImplementedError() + __all__ = ("TranscoderServiceTransport",) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py index 6bdeddf..85037d0 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import grpc_helpers -from google.api_core import gapic_v1 +from google.api_core import gapic_v1, grpc_helpers import google.auth # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from google.cloud.video.transcoder_v1.types import resources, services + +from .base import DEFAULT_CLIENT_INFO, TranscoderServiceTransport class TranscoderServiceGrpcTransport(TranscoderServiceTransport): @@ -232,8 +230,7 @@ def create_channel( @property def grpc_channel(self) -> grpc.Channel: - """Return the channel designed to connect to this service. 
- """ + """Return the channel designed to connect to this service.""" return self._grpc_channel @property @@ -443,5 +440,9 @@ def delete_job_template( def close(self): self.grpc_channel.close() + @property + def kind(self) -> str: + return "grpc" + __all__ = ("TranscoderServiceGrpcTransport",) diff --git a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py index e68807f..9522b90 100644 --- a/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py +++ b/google/cloud/video/transcoder_v1/services/transcoder_service/transports/grpc_asyncio.py @@ -13,21 +13,19 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import warnings from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple, Union +import warnings -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers_async +from google.api_core import gapic_v1, grpc_helpers_async from google.auth import credentials as ga_credentials # type: ignore from google.auth.transport.grpc import SslCredentials # type: ignore - +from google.protobuf import empty_pb2 # type: ignore import grpc # type: ignore from grpc.experimental import aio # type: ignore -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services -from google.protobuf import empty_pb2 # type: ignore -from .base import TranscoderServiceTransport, DEFAULT_CLIENT_INFO +from google.cloud.video.transcoder_v1.types import resources, services + +from .base import DEFAULT_CLIENT_INFO, TranscoderServiceTransport from .grpc import TranscoderServiceGrpcTransport diff --git a/google/cloud/video/transcoder_v1/types/resources.py b/google/cloud/video/transcoder_v1/types/resources.py index 5d3e00e..94ea294 100644 --- a/google/cloud/video/transcoder_v1/types/resources.py 
+++ b/google/cloud/video/transcoder_v1/types/resources.py @@ -13,12 +13,10 @@ # See the License for the specific language governing permissions and # limitations under the License. # -import proto # type: ignore - from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore - +import proto # type: ignore __protobuf__ = proto.module( package="google.cloud.video.transcoder.v1", @@ -116,21 +114,58 @@ class ProcessingState(proto.Enum): SUCCEEDED = 3 FAILED = 4 - name = proto.Field(proto.STRING, number=1,) - input_uri = proto.Field(proto.STRING, number=2,) - output_uri = proto.Field(proto.STRING, number=3,) - template_id = proto.Field(proto.STRING, number=4, oneof="job_config",) + name = proto.Field( + proto.STRING, + number=1, + ) + input_uri = proto.Field( + proto.STRING, + number=2, + ) + output_uri = proto.Field( + proto.STRING, + number=3, + ) + template_id = proto.Field( + proto.STRING, + number=4, + oneof="job_config", + ) config = proto.Field( - proto.MESSAGE, number=5, oneof="job_config", message="JobConfig", + proto.MESSAGE, + number=5, + oneof="job_config", + message="JobConfig", + ) + state = proto.Field( + proto.ENUM, + number=8, + enum=ProcessingState, ) - state = proto.Field(proto.ENUM, number=8, enum=ProcessingState,) create_time = proto.Field( - proto.MESSAGE, number=12, message=timestamp_pb2.Timestamp, + proto.MESSAGE, + number=12, + message=timestamp_pb2.Timestamp, + ) + start_time = proto.Field( + proto.MESSAGE, + number=13, + message=timestamp_pb2.Timestamp, + ) + end_time = proto.Field( + proto.MESSAGE, + number=14, + message=timestamp_pb2.Timestamp, + ) + ttl_after_completion_days = proto.Field( + proto.INT32, + number=15, + ) + error = proto.Field( + proto.MESSAGE, + number=17, + message=status_pb2.Status, ) - start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp_pb2.Timestamp,) - end_time = proto.Field(proto.MESSAGE, number=14, 
message=timestamp_pb2.Timestamp,) - ttl_after_completion_days = proto.Field(proto.INT32, number=15,) - error = proto.Field(proto.MESSAGE, number=17, message=status_pb2.Status,) class JobTemplate(proto.Message): @@ -144,8 +179,15 @@ class JobTemplate(proto.Message): The configuration for this template. """ - name = proto.Field(proto.STRING, number=1,) - config = proto.Field(proto.MESSAGE, number=2, message="JobConfig",) + name = proto.Field( + proto.STRING, + number=1, + ) + config = proto.Field( + proto.MESSAGE, + number=2, + message="JobConfig", + ) class JobConfig(proto.Message): @@ -178,20 +220,56 @@ class JobConfig(proto.Message): descending Z-order. """ - inputs = proto.RepeatedField(proto.MESSAGE, number=1, message="Input",) - edit_list = proto.RepeatedField(proto.MESSAGE, number=2, message="EditAtom",) + inputs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message="Input", + ) + edit_list = proto.RepeatedField( + proto.MESSAGE, + number=2, + message="EditAtom", + ) elementary_streams = proto.RepeatedField( - proto.MESSAGE, number=3, message="ElementaryStream", + proto.MESSAGE, + number=3, + message="ElementaryStream", + ) + mux_streams = proto.RepeatedField( + proto.MESSAGE, + number=4, + message="MuxStream", + ) + manifests = proto.RepeatedField( + proto.MESSAGE, + number=5, + message="Manifest", + ) + output = proto.Field( + proto.MESSAGE, + number=6, + message="Output", + ) + ad_breaks = proto.RepeatedField( + proto.MESSAGE, + number=7, + message="AdBreak", ) - mux_streams = proto.RepeatedField(proto.MESSAGE, number=4, message="MuxStream",) - manifests = proto.RepeatedField(proto.MESSAGE, number=5, message="Manifest",) - output = proto.Field(proto.MESSAGE, number=6, message="Output",) - ad_breaks = proto.RepeatedField(proto.MESSAGE, number=7, message="AdBreak",) pubsub_destination = proto.Field( - proto.MESSAGE, number=8, message="PubsubDestination", + proto.MESSAGE, + number=8, + message="PubsubDestination", + ) + sprite_sheets = 
proto.RepeatedField( + proto.MESSAGE, + number=9, + message="SpriteSheet", + ) + overlays = proto.RepeatedField( + proto.MESSAGE, + number=10, + message="Overlay", ) - sprite_sheets = proto.RepeatedField(proto.MESSAGE, number=9, message="SpriteSheet",) - overlays = proto.RepeatedField(proto.MESSAGE, number=10, message="Overlay",) class Input(proto.Message): @@ -211,10 +289,18 @@ class Input(proto.Message): Preprocessing configurations. """ - key = proto.Field(proto.STRING, number=1,) - uri = proto.Field(proto.STRING, number=2,) + key = proto.Field( + proto.STRING, + number=1, + ) + uri = proto.Field( + proto.STRING, + number=2, + ) preprocessing_config = proto.Field( - proto.MESSAGE, number=3, message="PreprocessingConfig", + proto.MESSAGE, + number=3, + message="PreprocessingConfig", ) @@ -228,7 +314,10 @@ class Output(proto.Message): from ``Job.output_uri``. """ - uri = proto.Field(proto.STRING, number=1,) + uri = proto.Field( + proto.STRING, + number=1, + ) class EditAtom(proto.Message): @@ -251,13 +340,23 @@ class EditAtom(proto.Message): file timeline. The default is ``0s``. """ - key = proto.Field(proto.STRING, number=1,) - inputs = proto.RepeatedField(proto.STRING, number=2,) + key = proto.Field( + proto.STRING, + number=1, + ) + inputs = proto.RepeatedField( + proto.STRING, + number=2, + ) end_time_offset = proto.Field( - proto.MESSAGE, number=3, message=duration_pb2.Duration, + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, ) start_time_offset = proto.Field( - proto.MESSAGE, number=4, message=duration_pb2.Duration, + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, ) @@ -271,7 +370,9 @@ class AdBreak(proto.Message): """ start_time_offset = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, ) @@ -305,15 +406,27 @@ class ElementaryStream(proto.Message): This field is a member of `oneof`_ ``elementary_stream``. 
""" - key = proto.Field(proto.STRING, number=4,) + key = proto.Field( + proto.STRING, + number=4, + ) video_stream = proto.Field( - proto.MESSAGE, number=1, oneof="elementary_stream", message="VideoStream", + proto.MESSAGE, + number=1, + oneof="elementary_stream", + message="VideoStream", ) audio_stream = proto.Field( - proto.MESSAGE, number=2, oneof="elementary_stream", message="AudioStream", + proto.MESSAGE, + number=2, + oneof="elementary_stream", + message="AudioStream", ) text_stream = proto.Field( - proto.MESSAGE, number=3, oneof="elementary_stream", message="TextStream", + proto.MESSAGE, + number=3, + oneof="elementary_stream", + message="TextStream", ) @@ -349,11 +462,27 @@ class MuxStream(proto.Message): Segment settings for ``ts``, ``fmp4`` and ``vtt``. """ - key = proto.Field(proto.STRING, number=1,) - file_name = proto.Field(proto.STRING, number=2,) - container = proto.Field(proto.STRING, number=3,) - elementary_streams = proto.RepeatedField(proto.STRING, number=4,) - segment_settings = proto.Field(proto.MESSAGE, number=5, message="SegmentSettings",) + key = proto.Field( + proto.STRING, + number=1, + ) + file_name = proto.Field( + proto.STRING, + number=2, + ) + container = proto.Field( + proto.STRING, + number=3, + ) + elementary_streams = proto.RepeatedField( + proto.STRING, + number=4, + ) + segment_settings = proto.Field( + proto.MESSAGE, + number=5, + message="SegmentSettings", + ) class Manifest(proto.Message): @@ -381,9 +510,19 @@ class ManifestType(proto.Enum): HLS = 1 DASH = 2 - file_name = proto.Field(proto.STRING, number=1,) - type_ = proto.Field(proto.ENUM, number=2, enum=ManifestType,) - mux_streams = proto.RepeatedField(proto.STRING, number=3,) + file_name = proto.Field( + proto.STRING, + number=1, + ) + type_ = proto.Field( + proto.ENUM, + number=2, + enum=ManifestType, + ) + mux_streams = proto.RepeatedField( + proto.STRING, + number=3, + ) class PubsubDestination(proto.Message): @@ -396,7 +535,10 @@ class 
PubsubDestination(proto.Message): ``projects/{project}/topics/{topic}``. """ - topic = proto.Field(proto.STRING, number=1,) + topic = proto.Field( + proto.STRING, + number=1, + ) class SpriteSheet(proto.Message): @@ -475,26 +617,55 @@ class SpriteSheet(proto.Message): ratio. """ - format_ = proto.Field(proto.STRING, number=1,) - file_prefix = proto.Field(proto.STRING, number=2,) - sprite_width_pixels = proto.Field(proto.INT32, number=3,) - sprite_height_pixels = proto.Field(proto.INT32, number=4,) - column_count = proto.Field(proto.INT32, number=5,) - row_count = proto.Field(proto.INT32, number=6,) + format_ = proto.Field( + proto.STRING, + number=1, + ) + file_prefix = proto.Field( + proto.STRING, + number=2, + ) + sprite_width_pixels = proto.Field( + proto.INT32, + number=3, + ) + sprite_height_pixels = proto.Field( + proto.INT32, + number=4, + ) + column_count = proto.Field( + proto.INT32, + number=5, + ) + row_count = proto.Field( + proto.INT32, + number=6, + ) start_time_offset = proto.Field( - proto.MESSAGE, number=7, message=duration_pb2.Duration, + proto.MESSAGE, + number=7, + message=duration_pb2.Duration, ) end_time_offset = proto.Field( - proto.MESSAGE, number=8, message=duration_pb2.Duration, + proto.MESSAGE, + number=8, + message=duration_pb2.Duration, + ) + total_count = proto.Field( + proto.INT32, + number=9, + oneof="extraction_strategy", ) - total_count = proto.Field(proto.INT32, number=9, oneof="extraction_strategy",) interval = proto.Field( proto.MESSAGE, number=10, oneof="extraction_strategy", message=duration_pb2.Duration, ) - quality = proto.Field(proto.INT32, number=11,) + quality = proto.Field( + proto.INT32, + number=11, + ) class Overlay(proto.Message): @@ -524,8 +695,14 @@ class NormalizedCoordinate(proto.Message): Normalized y coordinate. 
""" - x = proto.Field(proto.DOUBLE, number=1,) - y = proto.Field(proto.DOUBLE, number=2,) + x = proto.Field( + proto.DOUBLE, + number=1, + ) + y = proto.Field( + proto.DOUBLE, + number=2, + ) class Image(proto.Message): r"""Overlaid jpeg image. @@ -547,11 +724,19 @@ class Image(proto.Message): value greater than ``0.0``. """ - uri = proto.Field(proto.STRING, number=1,) + uri = proto.Field( + proto.STRING, + number=1, + ) resolution = proto.Field( - proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate", + proto.MESSAGE, + number=2, + message="Overlay.NormalizedCoordinate", + ) + alpha = proto.Field( + proto.DOUBLE, + number=3, ) - alpha = proto.Field(proto.DOUBLE, number=3,) class AnimationStatic(proto.Message): r"""Display static overlay object. @@ -570,10 +755,14 @@ class AnimationStatic(proto.Message): """ xy = proto.Field( - proto.MESSAGE, number=1, message="Overlay.NormalizedCoordinate", + proto.MESSAGE, + number=1, + message="Overlay.NormalizedCoordinate", ) start_time_offset = proto.Field( - proto.MESSAGE, number=2, message=duration_pb2.Duration, + proto.MESSAGE, + number=2, + message=duration_pb2.Duration, ) class AnimationFade(proto.Message): @@ -598,15 +787,25 @@ class AnimationFade(proto.Message): ``start_time_offset`` + 1s """ - fade_type = proto.Field(proto.ENUM, number=1, enum="Overlay.FadeType",) + fade_type = proto.Field( + proto.ENUM, + number=1, + enum="Overlay.FadeType", + ) xy = proto.Field( - proto.MESSAGE, number=2, message="Overlay.NormalizedCoordinate", + proto.MESSAGE, + number=2, + message="Overlay.NormalizedCoordinate", ) start_time_offset = proto.Field( - proto.MESSAGE, number=3, message=duration_pb2.Duration, + proto.MESSAGE, + number=3, + message=duration_pb2.Duration, ) end_time_offset = proto.Field( - proto.MESSAGE, number=4, message=duration_pb2.Duration, + proto.MESSAGE, + number=4, + message=duration_pb2.Duration, ) class AnimationEnd(proto.Message): @@ -621,7 +820,9 @@ class AnimationEnd(proto.Message): """ 
start_time_offset = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, ) class Animation(proto.Message): @@ -668,8 +869,16 @@ class Animation(proto.Message): message="Overlay.AnimationEnd", ) - image = proto.Field(proto.MESSAGE, number=1, message=Image,) - animations = proto.RepeatedField(proto.MESSAGE, number=2, message=Animation,) + image = proto.Field( + proto.MESSAGE, + number=1, + message=Image, + ) + animations = proto.RepeatedField( + proto.MESSAGE, + number=2, + message=Animation, + ) class PreprocessingConfig(proto.Message): @@ -711,9 +920,18 @@ class Color(proto.Message): change. The default is 0. """ - saturation = proto.Field(proto.DOUBLE, number=1,) - contrast = proto.Field(proto.DOUBLE, number=2,) - brightness = proto.Field(proto.DOUBLE, number=3,) + saturation = proto.Field( + proto.DOUBLE, + number=1, + ) + contrast = proto.Field( + proto.DOUBLE, + number=2, + ) + brightness = proto.Field( + proto.DOUBLE, + number=3, + ) class Denoise(proto.Message): r"""Denoise preprocessing configuration. @@ -733,8 +951,14 @@ class Denoise(proto.Message): - ``grain`` """ - strength = proto.Field(proto.DOUBLE, number=1,) - tune = proto.Field(proto.STRING, number=2,) + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + tune = proto.Field( + proto.STRING, + number=2, + ) class Deblock(proto.Message): r"""Deblock preprocessing configuration. @@ -749,8 +973,14 @@ class Deblock(proto.Message): Enable deblocker. The default is ``false``. """ - strength = proto.Field(proto.DOUBLE, number=1,) - enabled = proto.Field(proto.BOOL, number=2,) + strength = proto.Field( + proto.DOUBLE, + number=1, + ) + enabled = proto.Field( + proto.BOOL, + number=2, + ) class Audio(proto.Message): r"""Audio preprocessing configuration. @@ -778,9 +1008,18 @@ class Audio(proto.Message): ``false``. 
""" - lufs = proto.Field(proto.DOUBLE, number=1,) - high_boost = proto.Field(proto.BOOL, number=2,) - low_boost = proto.Field(proto.BOOL, number=3,) + lufs = proto.Field( + proto.DOUBLE, + number=1, + ) + high_boost = proto.Field( + proto.BOOL, + number=2, + ) + low_boost = proto.Field( + proto.BOOL, + number=3, + ) class Crop(proto.Message): r"""Video cropping configuration for the input video. The cropped @@ -801,10 +1040,22 @@ class Crop(proto.Message): The default is 0. """ - top_pixels = proto.Field(proto.INT32, number=1,) - bottom_pixels = proto.Field(proto.INT32, number=2,) - left_pixels = proto.Field(proto.INT32, number=3,) - right_pixels = proto.Field(proto.INT32, number=4,) + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) class Pad(proto.Message): r"""Pad filter configuration for the input video. The padded @@ -826,17 +1077,53 @@ class Pad(proto.Message): default is 0. 
""" - top_pixels = proto.Field(proto.INT32, number=1,) - bottom_pixels = proto.Field(proto.INT32, number=2,) - left_pixels = proto.Field(proto.INT32, number=3,) - right_pixels = proto.Field(proto.INT32, number=4,) + top_pixels = proto.Field( + proto.INT32, + number=1, + ) + bottom_pixels = proto.Field( + proto.INT32, + number=2, + ) + left_pixels = proto.Field( + proto.INT32, + number=3, + ) + right_pixels = proto.Field( + proto.INT32, + number=4, + ) - color = proto.Field(proto.MESSAGE, number=1, message=Color,) - denoise = proto.Field(proto.MESSAGE, number=2, message=Denoise,) - deblock = proto.Field(proto.MESSAGE, number=3, message=Deblock,) - audio = proto.Field(proto.MESSAGE, number=4, message=Audio,) - crop = proto.Field(proto.MESSAGE, number=5, message=Crop,) - pad = proto.Field(proto.MESSAGE, number=6, message=Pad,) + color = proto.Field( + proto.MESSAGE, + number=1, + message=Color, + ) + denoise = proto.Field( + proto.MESSAGE, + number=2, + message=Denoise, + ) + deblock = proto.Field( + proto.MESSAGE, + number=3, + message=Deblock, + ) + audio = proto.Field( + proto.MESSAGE, + number=4, + message=Audio, + ) + crop = proto.Field( + proto.MESSAGE, + number=5, + message=Crop, + ) + pad = proto.Field( + proto.MESSAGE, + number=6, + message=Pad, + ) class VideoStream(proto.Message): @@ -1001,28 +1288,89 @@ class H264CodecSettings(proto.Message): ``H264CodecSettings`` message. 
""" - width_pixels = proto.Field(proto.INT32, number=1,) - height_pixels = proto.Field(proto.INT32, number=2,) - frame_rate = proto.Field(proto.DOUBLE, number=3,) - bitrate_bps = proto.Field(proto.INT32, number=4,) - pixel_format = proto.Field(proto.STRING, number=5,) - rate_control_mode = proto.Field(proto.STRING, number=6,) - crf_level = proto.Field(proto.INT32, number=7,) - allow_open_gop = proto.Field(proto.BOOL, number=8,) - gop_frame_count = proto.Field(proto.INT32, number=9, oneof="gop_mode",) + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=9, + oneof="gop_mode", + ) gop_duration = proto.Field( - proto.MESSAGE, number=10, oneof="gop_mode", message=duration_pb2.Duration, - ) - enable_two_pass = proto.Field(proto.BOOL, number=11,) - vbv_size_bits = proto.Field(proto.INT32, number=12,) - vbv_fullness_bits = proto.Field(proto.INT32, number=13,) - entropy_coder = proto.Field(proto.STRING, number=14,) - b_pyramid = proto.Field(proto.BOOL, number=15,) - b_frame_count = proto.Field(proto.INT32, number=16,) - aq_strength = proto.Field(proto.DOUBLE, number=17,) - profile = proto.Field(proto.STRING, number=18,) - tune = proto.Field(proto.STRING, number=19,) - preset = proto.Field(proto.STRING, number=20,) + proto.MESSAGE, + number=10, + oneof="gop_mode", + message=duration_pb2.Duration, + ) + enable_two_pass = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits = proto.Field( + proto.INT32, + number=13, 
+ ) + entropy_coder = proto.Field( + proto.STRING, + number=14, + ) + b_pyramid = proto.Field( + proto.BOOL, + number=15, + ) + b_frame_count = proto.Field( + proto.INT32, + number=16, + ) + aq_strength = proto.Field( + proto.DOUBLE, + number=17, + ) + profile = proto.Field( + proto.STRING, + number=18, + ) + tune = proto.Field( + proto.STRING, + number=19, + ) + preset = proto.Field( + proto.STRING, + number=20, + ) class H265CodecSettings(proto.Message): r"""H265 codec settings. @@ -1174,27 +1522,85 @@ class H265CodecSettings(proto.Message): ``H265CodecSettings`` message. """ - width_pixels = proto.Field(proto.INT32, number=1,) - height_pixels = proto.Field(proto.INT32, number=2,) - frame_rate = proto.Field(proto.DOUBLE, number=3,) - bitrate_bps = proto.Field(proto.INT32, number=4,) - pixel_format = proto.Field(proto.STRING, number=5,) - rate_control_mode = proto.Field(proto.STRING, number=6,) - crf_level = proto.Field(proto.INT32, number=7,) - allow_open_gop = proto.Field(proto.BOOL, number=8,) - gop_frame_count = proto.Field(proto.INT32, number=9, oneof="gop_mode",) + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + allow_open_gop = proto.Field( + proto.BOOL, + number=8, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=9, + oneof="gop_mode", + ) gop_duration = proto.Field( - proto.MESSAGE, number=10, oneof="gop_mode", message=duration_pb2.Duration, - ) - enable_two_pass = proto.Field(proto.BOOL, number=11,) - vbv_size_bits = proto.Field(proto.INT32, number=12,) - vbv_fullness_bits = proto.Field(proto.INT32, number=13,) - b_pyramid = proto.Field(proto.BOOL, number=14,) - 
b_frame_count = proto.Field(proto.INT32, number=15,) - aq_strength = proto.Field(proto.DOUBLE, number=16,) - profile = proto.Field(proto.STRING, number=17,) - tune = proto.Field(proto.STRING, number=18,) - preset = proto.Field(proto.STRING, number=19,) + proto.MESSAGE, + number=10, + oneof="gop_mode", + message=duration_pb2.Duration, + ) + enable_two_pass = proto.Field( + proto.BOOL, + number=11, + ) + vbv_size_bits = proto.Field( + proto.INT32, + number=12, + ) + vbv_fullness_bits = proto.Field( + proto.INT32, + number=13, + ) + b_pyramid = proto.Field( + proto.BOOL, + number=14, + ) + b_frame_count = proto.Field( + proto.INT32, + number=15, + ) + aq_strength = proto.Field( + proto.DOUBLE, + number=16, + ) + profile = proto.Field( + proto.STRING, + number=17, + ) + tune = proto.Field( + proto.STRING, + number=18, + ) + preset = proto.Field( + proto.STRING, + number=19, + ) class Vp9CodecSettings(proto.Message): r"""VP9 codec settings. @@ -1286,27 +1692,67 @@ class Vp9CodecSettings(proto.Message): ``Vp9CodecSettings`` message. 
""" - width_pixels = proto.Field(proto.INT32, number=1,) - height_pixels = proto.Field(proto.INT32, number=2,) - frame_rate = proto.Field(proto.DOUBLE, number=3,) - bitrate_bps = proto.Field(proto.INT32, number=4,) - pixel_format = proto.Field(proto.STRING, number=5,) - rate_control_mode = proto.Field(proto.STRING, number=6,) - crf_level = proto.Field(proto.INT32, number=7,) - gop_frame_count = proto.Field(proto.INT32, number=8, oneof="gop_mode",) + width_pixels = proto.Field( + proto.INT32, + number=1, + ) + height_pixels = proto.Field( + proto.INT32, + number=2, + ) + frame_rate = proto.Field( + proto.DOUBLE, + number=3, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=4, + ) + pixel_format = proto.Field( + proto.STRING, + number=5, + ) + rate_control_mode = proto.Field( + proto.STRING, + number=6, + ) + crf_level = proto.Field( + proto.INT32, + number=7, + ) + gop_frame_count = proto.Field( + proto.INT32, + number=8, + oneof="gop_mode", + ) gop_duration = proto.Field( - proto.MESSAGE, number=9, oneof="gop_mode", message=duration_pb2.Duration, + proto.MESSAGE, + number=9, + oneof="gop_mode", + message=duration_pb2.Duration, + ) + profile = proto.Field( + proto.STRING, + number=10, ) - profile = proto.Field(proto.STRING, number=10,) h264 = proto.Field( - proto.MESSAGE, number=1, oneof="codec_settings", message=H264CodecSettings, + proto.MESSAGE, + number=1, + oneof="codec_settings", + message=H264CodecSettings, ) h265 = proto.Field( - proto.MESSAGE, number=2, oneof="codec_settings", message=H265CodecSettings, + proto.MESSAGE, + number=2, + oneof="codec_settings", + message=H265CodecSettings, ) vp9 = proto.Field( - proto.MESSAGE, number=3, oneof="codec_settings", message=Vp9CodecSettings, + proto.MESSAGE, + number=3, + oneof="codec_settings", + message=Vp9CodecSettings, ) @@ -1378,19 +1824,56 @@ class AudioMapping(proto.Message): default is 0. 
""" - atom_key = proto.Field(proto.STRING, number=1,) - input_key = proto.Field(proto.STRING, number=2,) - input_track = proto.Field(proto.INT32, number=3,) - input_channel = proto.Field(proto.INT32, number=4,) - output_channel = proto.Field(proto.INT32, number=5,) - gain_db = proto.Field(proto.DOUBLE, number=6,) + atom_key = proto.Field( + proto.STRING, + number=1, + ) + input_key = proto.Field( + proto.STRING, + number=2, + ) + input_track = proto.Field( + proto.INT32, + number=3, + ) + input_channel = proto.Field( + proto.INT32, + number=4, + ) + output_channel = proto.Field( + proto.INT32, + number=5, + ) + gain_db = proto.Field( + proto.DOUBLE, + number=6, + ) - codec = proto.Field(proto.STRING, number=1,) - bitrate_bps = proto.Field(proto.INT32, number=2,) - channel_count = proto.Field(proto.INT32, number=3,) - channel_layout = proto.RepeatedField(proto.STRING, number=4,) - mapping_ = proto.RepeatedField(proto.MESSAGE, number=5, message=AudioMapping,) - sample_rate_hertz = proto.Field(proto.INT32, number=6,) + codec = proto.Field( + proto.STRING, + number=1, + ) + bitrate_bps = proto.Field( + proto.INT32, + number=2, + ) + channel_count = proto.Field( + proto.INT32, + number=3, + ) + channel_layout = proto.RepeatedField( + proto.STRING, + number=4, + ) + mapping_ = proto.RepeatedField( + proto.MESSAGE, + number=5, + message=AudioMapping, + ) + sample_rate_hertz = proto.Field( + proto.INT32, + number=6, + ) class TextStream(proto.Message): @@ -1428,12 +1911,28 @@ class TextMapping(proto.Message): in the input file. 
""" - atom_key = proto.Field(proto.STRING, number=1,) - input_key = proto.Field(proto.STRING, number=2,) - input_track = proto.Field(proto.INT32, number=3,) + atom_key = proto.Field( + proto.STRING, + number=1, + ) + input_key = proto.Field( + proto.STRING, + number=2, + ) + input_track = proto.Field( + proto.INT32, + number=3, + ) - codec = proto.Field(proto.STRING, number=1,) - mapping_ = proto.RepeatedField(proto.MESSAGE, number=3, message=TextMapping,) + codec = proto.Field( + proto.STRING, + number=1, + ) + mapping_ = proto.RepeatedField( + proto.MESSAGE, + number=3, + message=TextMapping, + ) class SegmentSettings(proto.Message): @@ -1452,9 +1951,14 @@ class SegmentSettings(proto.Message): """ segment_duration = proto.Field( - proto.MESSAGE, number=1, message=duration_pb2.Duration, + proto.MESSAGE, + number=1, + message=duration_pb2.Duration, + ) + individual_segments = proto.Field( + proto.BOOL, + number=3, ) - individual_segments = proto.Field(proto.BOOL, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/google/cloud/video/transcoder_v1/types/services.py b/google/cloud/video/transcoder_v1/types/services.py index 65b5af1..3ddd5e0 100644 --- a/google/cloud/video/transcoder_v1/types/services.py +++ b/google/cloud/video/transcoder_v1/types/services.py @@ -17,7 +17,6 @@ from google.cloud.video.transcoder_v1.types import resources - __protobuf__ = proto.module( package="google.cloud.video.transcoder.v1", manifest={ @@ -47,8 +46,15 @@ class CreateJobRequest(proto.Message): job. """ - parent = proto.Field(proto.STRING, number=1,) - job = proto.Field(proto.MESSAGE, number=2, message=resources.Job,) + parent = proto.Field( + proto.STRING, + number=1, + ) + job = proto.Field( + proto.MESSAGE, + number=2, + message=resources.Job, + ) class ListJobsRequest(proto.Message): @@ -73,11 +79,26 @@ class ListJobsRequest(proto.Message): https://google.aip.dev/132#ordering. 
""" - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class GetJobRequest(proto.Message): @@ -89,7 +110,10 @@ class GetJobRequest(proto.Message): ``projects/{project}/locations/{location}/jobs/{job}`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteJobRequest(proto.Message): @@ -105,8 +129,14 @@ class DeleteJobRequest(proto.Message): on the server. """ - name = proto.Field(proto.STRING, number=1,) - allow_missing = proto.Field(proto.BOOL, number=2,) + name = proto.Field( + proto.STRING, + number=1, + ) + allow_missing = proto.Field( + proto.BOOL, + number=2, + ) class ListJobsResponse(proto.Message): @@ -125,9 +155,19 @@ class ListJobsResponse(proto.Message): def raw_page(self): return self - jobs = proto.RepeatedField(proto.MESSAGE, number=1, message=resources.Job,) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) + jobs = proto.RepeatedField( + proto.MESSAGE, + number=1, + message=resources.Job, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, + ) class CreateJobTemplateRequest(proto.Message): @@ -150,9 +190,19 @@ class CreateJobTemplateRequest(proto.Message): ``[a-zA-Z][a-zA-Z0-9_-]*``. 
""" - parent = proto.Field(proto.STRING, number=1,) - job_template = proto.Field(proto.MESSAGE, number=2, message=resources.JobTemplate,) - job_template_id = proto.Field(proto.STRING, number=3,) + parent = proto.Field( + proto.STRING, + number=1, + ) + job_template = proto.Field( + proto.MESSAGE, + number=2, + message=resources.JobTemplate, + ) + job_template_id = proto.Field( + proto.STRING, + number=3, + ) class ListJobTemplatesRequest(proto.Message): @@ -177,11 +227,26 @@ class ListJobTemplatesRequest(proto.Message): https://google.aip.dev/132#ordering. """ - parent = proto.Field(proto.STRING, number=1,) - page_size = proto.Field(proto.INT32, number=2,) - page_token = proto.Field(proto.STRING, number=3,) - filter = proto.Field(proto.STRING, number=4,) - order_by = proto.Field(proto.STRING, number=5,) + parent = proto.Field( + proto.STRING, + number=1, + ) + page_size = proto.Field( + proto.INT32, + number=2, + ) + page_token = proto.Field( + proto.STRING, + number=3, + ) + filter = proto.Field( + proto.STRING, + number=4, + ) + order_by = proto.Field( + proto.STRING, + number=5, + ) class GetJobTemplateRequest(proto.Message): @@ -193,7 +258,10 @@ class GetJobTemplateRequest(proto.Message): ``projects/{project}/locations/{location}/jobTemplates/{job_template}`` """ - name = proto.Field(proto.STRING, number=1,) + name = proto.Field( + proto.STRING, + number=1, + ) class DeleteJobTemplateRequest(proto.Message): @@ -209,8 +277,14 @@ class DeleteJobTemplateRequest(proto.Message): will be taken on the server. 
""" - name = proto.Field(proto.STRING, number=1,) - allow_missing = proto.Field(proto.BOOL, number=2,) + name = proto.Field( + proto.STRING, + number=1, + ) + allow_missing = proto.Field( + proto.BOOL, + number=2, + ) class ListJobTemplatesResponse(proto.Message): @@ -231,10 +305,18 @@ def raw_page(self): return self job_templates = proto.RepeatedField( - proto.MESSAGE, number=1, message=resources.JobTemplate, + proto.MESSAGE, + number=1, + message=resources.JobTemplate, + ) + next_page_token = proto.Field( + proto.STRING, + number=2, + ) + unreachable = proto.RepeatedField( + proto.STRING, + number=3, ) - next_page_token = proto.Field(proto.STRING, number=2,) - unreachable = proto.RepeatedField(proto.STRING, number=3,) __all__ = tuple(sorted(__protobuf__.manifest)) diff --git a/noxfile.py b/noxfile.py index 2a2001c..7c1742d 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,19 +17,45 @@ # Generated by synthtool. DO NOT EDIT! from __future__ import absolute_import + import os import pathlib import shutil +import warnings import nox - -BLACK_VERSION = "black==19.10b0" -BLACK_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" +LINT_PATHS = ["docs", "google", "tests", "noxfile.py", "setup.py"] DEFAULT_PYTHON_VERSION = "3.8" -SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] + UNIT_TEST_PYTHON_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] +UNIT_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "asyncmock", + "pytest", + "pytest-cov", + "pytest-asyncio", +] +UNIT_TEST_EXTERNAL_DEPENDENCIES = [] +UNIT_TEST_LOCAL_DEPENDENCIES = [] +UNIT_TEST_DEPENDENCIES = [] +UNIT_TEST_EXTRAS = [] +UNIT_TEST_EXTRAS_BY_PYTHON = {} + +SYSTEM_TEST_PYTHON_VERSIONS = ["3.8"] +SYSTEM_TEST_STANDARD_DEPENDENCIES = [ + "mock", + "pytest", + "google-cloud-testutils", +] +SYSTEM_TEST_EXTERNAL_DEPENDENCIES = [] +SYSTEM_TEST_LOCAL_DEPENDENCIES = [] +SYSTEM_TEST_DEPENDENCIES = [] +SYSTEM_TEST_EXTRAS = [] +SYSTEM_TEST_EXTRAS_BY_PYTHON = {} 
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute() @@ -57,7 +83,9 @@ def lint(session): """ session.install("flake8", BLACK_VERSION) session.run( - "black", "--check", *BLACK_PATHS, + "black", + "--check", + *LINT_PATHS, ) session.run("flake8", "google", "tests") @@ -67,7 +95,28 @@ def blacken(session): """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) session.run( - "black", *BLACK_PATHS, + "black", + *LINT_PATHS, + ) + + +@nox.session(python=DEFAULT_PYTHON_VERSION) +def format(session): + """ + Run isort to sort imports. Then run black + to format code to uniform standard. + """ + session.install(BLACK_VERSION, ISORT_VERSION) + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run( + "isort", + "--fss", + *LINT_PATHS, + ) + session.run( + "black", + *LINT_PATHS, ) @@ -78,23 +127,41 @@ def lint_setup_py(session): session.run("python", "setup.py", "check", "--restructuredtext", "--strict") +def install_unittest_dependencies(session, *constraints): + standard_deps = UNIT_TEST_STANDARD_DEPENDENCIES + UNIT_TEST_DEPENDENCIES + session.install(*standard_deps, *constraints) + + if UNIT_TEST_EXTERNAL_DEPENDENCIES: + warnings.warn( + "'unit_test_external_dependencies' is deprecated. 
Instead, please " + "use 'unit_test_dependencies' or 'unit_test_local_dependencies'.", + DeprecationWarning, + ) + session.install(*UNIT_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_LOCAL_DEPENDENCIES: + session.install(*UNIT_TEST_LOCAL_DEPENDENCIES, *constraints) + + if UNIT_TEST_EXTRAS_BY_PYTHON: + extras = UNIT_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif UNIT_TEST_EXTRAS: + extras = UNIT_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + def default(session): # Install all test dependencies, then install this package in-place. constraints_path = str( CURRENT_DIRECTORY / "testing" / f"constraints-{session.python}.txt" ) - session.install( - "mock", - "asyncmock", - "pytest", - "pytest-cov", - "pytest-asyncio", - "-c", - constraints_path, - ) - - session.install("-e", ".", "-c", constraints_path) + install_unittest_dependencies(session, "-c", constraints_path) # Run py.test against the unit tests. session.run( @@ -118,6 +185,35 @@ def unit(session): default(session) +def install_systemtest_dependencies(session, *constraints): + + # Use pre-release gRPC for system tests. 
+ session.install("--pre", "grpcio") + + session.install(*SYSTEM_TEST_STANDARD_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTERNAL_DEPENDENCIES: + session.install(*SYSTEM_TEST_EXTERNAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_LOCAL_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_LOCAL_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_DEPENDENCIES: + session.install("-e", *SYSTEM_TEST_DEPENDENCIES, *constraints) + + if SYSTEM_TEST_EXTRAS_BY_PYTHON: + extras = SYSTEM_TEST_EXTRAS_BY_PYTHON.get(session.python, []) + elif SYSTEM_TEST_EXTRAS: + extras = SYSTEM_TEST_EXTRAS + else: + extras = [] + + if extras: + session.install("-e", f".[{','.join(extras)}]", *constraints) + else: + session.install("-e", ".", *constraints) + + @nox.session(python=SYSTEM_TEST_PYTHON_VERSIONS) def system(session): """Run the system test suite.""" @@ -140,13 +236,7 @@ def system(session): if not system_test_exists and not system_test_folder_exists: session.skip("System tests were not found") - # Use pre-release gRPC for system tests. - session.install("--pre", "grpcio") - - # Install all test dependencies, then install this package into the - # virtualenv's dist-packages. - session.install("mock", "pytest", "google-cloud-testutils", "-c", constraints_path) - session.install("-e", ".", "-c", constraints_path) + install_systemtest_dependencies(session, "-c", constraints_path) # Run py.test against the system tests. 
if system_test_exists: diff --git a/samples/generated_samples/snippet_metadata_transcoder_v1.json b/samples/generated_samples/snippet_metadata_transcoder_v1.json index b28a2a3..29b6253 100644 --- a/samples/generated_samples/snippet_metadata_transcoder_v1.json +++ b/samples/generated_samples/snippet_metadata_transcoder_v1.json @@ -1,16 +1,69 @@ { + "clientLibrary": { + "apis": [ + { + "id": "google.cloud.video.transcoder.v1", + "version": "v1" + } + ], + "language": "PYTHON", + "name": "google-cloud-video-transcoder" + }, "snippets": [ { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "CreateJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_template", + "type": "google.cloud.video.transcoder_v1.types.JobTemplate" + }, + { + "name": "job_template_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "create_job_template" }, + "description": "Sample for CreateJobTemplate", "file": "transcoder_v1_generated_transcoder_service_create_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_async", "segments": [ { 
@@ -43,18 +96,62 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "CreateJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobTemplateRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job_template", + "type": "google.cloud.video.transcoder_v1.types.JobTemplate" + }, + { + "name": "job_template_id", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "create_job_template" }, + "description": "Sample for CreateJobTemplate", "file": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_CreateJobTemplate_sync", "segments": [ { @@ -87,19 +184,59 @@ "start": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": 
"google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.create_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "CreateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.video.transcoder_v1.types.Job" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "create_job" }, + "description": "Sample for CreateJob", "file": "transcoder_v1_generated_transcoder_service_create_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_async", "segments": [ { @@ -132,18 +269,58 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.create_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.CreateJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "CreateJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.CreateJobRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "job", + "type": "google.cloud.video.transcoder_v1.types.Job" + }, + { 
+ "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "create_job" }, + "description": "Sample for CreateJob", "file": "transcoder_v1_generated_transcoder_service_create_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_CreateJob_sync", "segments": [ { @@ -176,19 +353,54 @@ "start": 46, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_create_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "DeleteJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_template" }, + "description": "Sample for DeleteJobTemplate", "file": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_async", "segments": [ { @@ -219,18 +431,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + 
"title": "transcoder_v1_generated_transcoder_service_delete_job_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "DeleteJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job_template" }, + "description": "Sample for DeleteJobTemplate", "file": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJobTemplate_sync", "segments": [ { @@ -261,19 +508,54 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.delete_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + 
{ + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "transcoder_v1_generated_transcoder_service_delete_job_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_async", "segments": [ { @@ -304,18 +586,53 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_delete_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.delete_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.DeleteJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "DeleteJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.DeleteJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "shortName": "delete_job" }, + "description": "Sample for DeleteJob", "file": "transcoder_v1_generated_transcoder_service_delete_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_DeleteJob_sync", "segments": [ { @@ -346,19 +663,55 @@ "end": 43, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"transcoder_v1_generated_transcoder_service_delete_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "GetJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "get_job_template" }, + "description": "Sample for GetJobTemplate", "file": "transcoder_v1_generated_transcoder_service_get_job_template_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_async", "segments": [ { @@ -391,18 +744,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_template_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job_template", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJobTemplate", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": 
"GetJobTemplate" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobTemplateRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.JobTemplate", + "shortName": "get_job_template" }, + "description": "Sample for GetJobTemplate", "file": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_GetJobTemplate_sync", "segments": [ { @@ -435,19 +824,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_template_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.get_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "transcoder_v1_generated_transcoder_service_get_job_async.py", + "language": 
"PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_async", "segments": [ { @@ -480,18 +905,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.get_job", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.GetJob", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "GetJob" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.GetJobRequest" + }, + { + "name": "name", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.types.Job", + "shortName": "get_job" }, + "description": "Sample for GetJob", "file": "transcoder_v1_generated_transcoder_service_get_job_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_GetJob_sync", "segments": [ { @@ -524,19 +985,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_get_job_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_job_templates", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", 
"service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "ListJobTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesAsyncPager", + "shortName": "list_job_templates" }, + "description": "Sample for ListJobTemplates", "file": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_async", "segments": [ { @@ -569,18 +1066,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_list_job_templates_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_job_templates", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobTemplates", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "ListJobTemplates" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobTemplatesRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + 
"resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobTemplatesPager", + "shortName": "list_job_templates" }, + "description": "Sample for ListJobTemplates", "file": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_ListJobTemplates_sync", "segments": [ { @@ -613,19 +1146,55 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_list_job_templates_sync.py" }, { + "canonical": true, "clientMethod": { "async": true, + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient", + "shortName": "TranscoderServiceAsyncClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceAsyncClient.list_jobs", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsAsyncPager", + "shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "transcoder_v1_generated_transcoder_service_list_jobs_async.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_async", "segments": [ { @@ -658,18 +1227,54 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": 
"transcoder_v1_generated_transcoder_service_list_jobs_async.py" }, { + "canonical": true, "clientMethod": { + "client": { + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient", + "shortName": "TranscoderServiceClient" + }, + "fullName": "google.cloud.video.transcoder_v1.TranscoderServiceClient.list_jobs", "method": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService.ListJobs", "service": { + "fullName": "google.cloud.video.transcoder.v1.TranscoderService", "shortName": "TranscoderService" }, "shortName": "ListJobs" - } + }, + "parameters": [ + { + "name": "request", + "type": "google.cloud.video.transcoder_v1.types.ListJobsRequest" + }, + { + "name": "parent", + "type": "str" + }, + { + "name": "retry", + "type": "google.api_core.retry.Retry" + }, + { + "name": "timeout", + "type": "float" + }, + { + "name": "metadata", + "type": "Sequence[Tuple[str, str]" + } + ], + "resultType": "google.cloud.video.transcoder_v1.services.transcoder_service.pagers.ListJobsPager", + "shortName": "list_jobs" }, + "description": "Sample for ListJobs", "file": "transcoder_v1_generated_transcoder_service_list_jobs_sync.py", + "language": "PYTHON", + "origin": "API_DEFINITION", "regionTag": "transcoder_v1_generated_TranscoderService_ListJobs_sync", "segments": [ { @@ -702,7 +1307,8 @@ "start": 42, "type": "RESPONSE_HANDLING" } - ] + ], + "title": "transcoder_v1_generated_transcoder_service_list_jobs_sync.py" } ] } diff --git a/samples/snippets/create_job_from_ad_hoc.py b/samples/snippets/create_job_from_ad_hoc.py index 2e08c61..7fc6dce 100644 --- a/samples/snippets/create_job_from_ad_hoc.py +++ b/samples/snippets/create_job_from_ad_hoc.py @@ -100,7 +100,9 @@ def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + 
"--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", @@ -114,5 +116,8 @@ def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): ) args = parser.parse_args() create_job_from_ad_hoc( - args.project_id, args.location, args.input_uri, args.output_uri, + args.project_id, + args.location, + args.input_uri, + args.output_uri, ) diff --git a/samples/snippets/create_job_from_preset.py b/samples/snippets/create_job_from_preset.py index 3539b32..67c1672 100644 --- a/samples/snippets/create_job_from_preset.py +++ b/samples/snippets/create_job_from_preset.py @@ -59,7 +59,9 @@ def create_job_from_preset(project_id, location, input_uri, output_uri, preset): parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", @@ -78,5 +80,9 @@ def create_job_from_preset(project_id, location, input_uri, output_uri, preset): ) args = parser.parse_args() create_job_from_preset( - args.project_id, args.location, args.input_uri, args.output_uri, args.preset, + args.project_id, + args.location, + args.input_uri, + args.output_uri, + args.preset, ) diff --git a/samples/snippets/create_job_from_template.py b/samples/snippets/create_job_from_template.py index 0a69704..685c3f6 100644 --- a/samples/snippets/create_job_from_template.py +++ b/samples/snippets/create_job_from_template.py @@ -59,7 +59,9 @@ def create_job_from_template(project_id, location, input_uri, output_uri, templa parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to 
start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/create_job_with_animated_overlay.py b/samples/snippets/create_job_with_animated_overlay.py index a90c542..a3f3cbb 100644 --- a/samples/snippets/create_job_with_animated_overlay.py +++ b/samples/snippets/create_job_with_animated_overlay.py @@ -81,7 +81,8 @@ def create_job_with_animated_overlay( image=transcoder_v1.types.Overlay.Image( uri=overlay_image_uri, resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0, y=0, + x=0, + y=0, ), alpha=1, ), @@ -90,20 +91,30 @@ def create_job_with_animated_overlay( animation_fade=transcoder_v1.types.Overlay.AnimationFade( fade_type=transcoder_v1.types.Overlay.FadeType.FADE_IN, xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0.5, y=0.5, + x=0.5, + y=0.5, + ), + start_time_offset=duration.Duration( + seconds=5, + ), + end_time_offset=duration.Duration( + seconds=10, ), - start_time_offset=duration.Duration(seconds=5,), - end_time_offset=duration.Duration(seconds=10,), ), ), transcoder_v1.types.Overlay.Animation( animation_fade=transcoder_v1.types.Overlay.AnimationFade( fade_type=transcoder_v1.types.Overlay.FadeType.FADE_OUT, xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0.5, y=0.5, + x=0.5, + y=0.5, + ), + start_time_offset=duration.Duration( + seconds=12, + ), + end_time_offset=duration.Duration( + seconds=15, ), - start_time_offset=duration.Duration(seconds=12,), - end_time_offset=duration.Duration(seconds=15,), ), ), ], @@ -121,7 +132,9 @@ def create_job_with_animated_overlay( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/create_job_with_concatenated_inputs.py 
b/samples/snippets/create_job_with_concatenated_inputs.py index 0a2d3ad..4a64f88 100644 --- a/samples/snippets/create_job_with_concatenated_inputs.py +++ b/samples/snippets/create_job_with_concatenated_inputs.py @@ -80,8 +80,14 @@ def create_job_with_concatenated_inputs( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input(key="input1", uri=input1_uri,), - transcoder_v1.types.Input(key="input2", uri=input2_uri,), + transcoder_v1.types.Input( + key="input1", + uri=input1_uri, + ), + transcoder_v1.types.Input( + key="input2", + uri=input2_uri, + ), ], edit_list=[ transcoder_v1.types.EditAtom( @@ -135,7 +141,9 @@ def create_job_with_concatenated_inputs( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input1_uri", diff --git a/samples/snippets/create_job_with_embedded_captions.py b/samples/snippets/create_job_with_embedded_captions.py index c28a066..9943889 100644 --- a/samples/snippets/create_job_with_embedded_captions.py +++ b/samples/snippets/create_job_with_embedded_captions.py @@ -32,7 +32,11 @@ def create_job_with_embedded_captions( - project_id, location, input_video_uri, input_captions_uri, output_uri, + project_id, + location, + input_video_uri, + input_captions_uri, + output_uri, ): """Creates a job based on an ad-hoc job configuration that embeds captions in the output video. 
@@ -53,12 +57,19 @@ def create_job_with_embedded_captions( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input(key="input0", uri=input_video_uri,), - transcoder_v1.types.Input(key="caption-input0", uri=input_captions_uri,), + transcoder_v1.types.Input( + key="input0", + uri=input_video_uri, + ), + transcoder_v1.types.Input( + key="caption-input0", + uri=input_captions_uri, + ), ], edit_list=[ transcoder_v1.types.EditAtom( - key="atom0", inputs=["input0", "caption-input0"], + key="atom0", + inputs=["input0", "caption-input0"], ), ], elementary_streams=[ @@ -85,7 +96,9 @@ def create_job_with_embedded_captions( codec="cea608", mapping_=[ transcoder_v1.types.TextStream.TextMapping( - atom_key="atom0", input_key="caption-input0", input_track=0, + atom_key="atom0", + input_key="caption-input0", + input_track=0, ), ], ), @@ -103,7 +116,9 @@ def create_job_with_embedded_captions( elementary_streams=["video-stream0", "audio-stream0"], ), transcoder_v1.types.MuxStream( - key="sd-dash", container="fmp4", elementary_streams=["video-stream0"], + key="sd-dash", + container="fmp4", + elementary_streams=["video-stream0"], ), transcoder_v1.types.MuxStream( key="audio-dash", @@ -113,7 +128,9 @@ def create_job_with_embedded_captions( ], manifests=[ transcoder_v1.types.Manifest( - file_name="manifest.m3u8", type_="HLS", mux_streams=["sd-hls"], + file_name="manifest.m3u8", + type_="HLS", + mux_streams=["sd-hls"], ), transcoder_v1.types.Manifest( file_name="manifest.mpd", @@ -133,7 +150,9 @@ def create_job_with_embedded_captions( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_video_uri", diff --git 
a/samples/snippets/create_job_with_periodic_images_spritesheet.py b/samples/snippets/create_job_with_periodic_images_spritesheet.py index 95621e2..1690ba6 100644 --- a/samples/snippets/create_job_with_periodic_images_spritesheet.py +++ b/samples/snippets/create_job_with_periodic_images_spritesheet.py @@ -88,14 +88,18 @@ def create_job_with_periodic_images_spritesheet( file_prefix="small-sprite-sheet", sprite_width_pixels=64, sprite_height_pixels=32, - interval=duration.Duration(seconds=7,), + interval=duration.Duration( + seconds=7, + ), ), # Generate a sprite sheet with 128x72px images. An image is taken every 7 seconds from the video. transcoder_v1.types.SpriteSheet( file_prefix="large-sprite-sheet", sprite_width_pixels=128, sprite_height_pixels=72, - interval=duration.Duration(seconds=7,), + interval=duration.Duration( + seconds=7, + ), ), ], ) @@ -110,7 +114,9 @@ def create_job_with_periodic_images_spritesheet( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", @@ -124,5 +130,8 @@ def create_job_with_periodic_images_spritesheet( ) args = parser.parse_args() create_job_with_periodic_images_spritesheet( - args.project_id, args.location, args.input_uri, args.output_uri, + args.project_id, + args.location, + args.input_uri, + args.output_uri, ) diff --git a/samples/snippets/create_job_with_set_number_images_spritesheet.py b/samples/snippets/create_job_with_set_number_images_spritesheet.py index b25c872..bc196fb 100644 --- a/samples/snippets/create_job_with_set_number_images_spritesheet.py +++ b/samples/snippets/create_job_with_set_number_images_spritesheet.py @@ -113,7 +113,9 @@ def create_job_with_set_number_images_spritesheet( parser = argparse.ArgumentParser() 
parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", @@ -127,5 +129,8 @@ def create_job_with_set_number_images_spritesheet( ) args = parser.parse_args() create_job_with_set_number_images_spritesheet( - args.project_id, args.location, args.input_uri, args.output_uri, + args.project_id, + args.location, + args.input_uri, + args.output_uri, ) diff --git a/samples/snippets/create_job_with_standalone_captions.py b/samples/snippets/create_job_with_standalone_captions.py index 49ccfc8..e819660 100644 --- a/samples/snippets/create_job_with_standalone_captions.py +++ b/samples/snippets/create_job_with_standalone_captions.py @@ -33,7 +33,11 @@ def create_job_with_standalone_captions( - project_id, location, input_video_uri, input_captions_uri, output_uri, + project_id, + location, + input_video_uri, + input_captions_uri, + output_uri, ): """Creates a job based on an ad-hoc job configuration that can use captions from a standalone file. 
@@ -54,12 +58,19 @@ def create_job_with_standalone_captions( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input(key="input0", uri=input_video_uri,), - transcoder_v1.types.Input(key="caption-input0", uri=input_captions_uri,), + transcoder_v1.types.Input( + key="input0", + uri=input_video_uri, + ), + transcoder_v1.types.Input( + key="caption-input0", + uri=input_captions_uri, + ), ], edit_list=[ transcoder_v1.types.EditAtom( - key="atom0", inputs=["input0", "caption-input0"], + key="atom0", + inputs=["input0", "caption-input0"], ), ], elementary_streams=[ @@ -86,7 +97,9 @@ def create_job_with_standalone_captions( codec="webvtt", mapping_=[ transcoder_v1.types.TextStream.TextMapping( - atom_key="atom0", input_key="caption-input0", input_track=0, + atom_key="atom0", + input_key="caption-input0", + input_track=0, ), ], ), @@ -108,7 +121,9 @@ def create_job_with_standalone_captions( container="vtt", elementary_streams=["vtt-stream0"], segment_settings=transcoder_v1.types.SegmentSettings( - segment_duration=duration.Duration(seconds=6,), + segment_duration=duration.Duration( + seconds=6, + ), individual_segments=True, ), ), @@ -132,7 +147,9 @@ def create_job_with_standalone_captions( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_video_uri", diff --git a/samples/snippets/create_job_with_static_overlay.py b/samples/snippets/create_job_with_static_overlay.py index 5386a8a..37ce26b 100644 --- a/samples/snippets/create_job_with_static_overlay.py +++ b/samples/snippets/create_job_with_static_overlay.py @@ -81,7 +81,8 @@ def create_job_with_static_overlay( image=transcoder_v1.types.Overlay.Image( uri=overlay_image_uri, 
resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=1, y=0.5, + x=1, + y=0.5, ), alpha=1, ), @@ -89,14 +90,19 @@ def create_job_with_static_overlay( transcoder_v1.types.Overlay.Animation( animation_static=transcoder_v1.types.Overlay.AnimationStatic( xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0, y=0, + x=0, + y=0, + ), + start_time_offset=duration.Duration( + seconds=0, ), - start_time_offset=duration.Duration(seconds=0,), ), ), transcoder_v1.types.Overlay.Animation( animation_end=transcoder_v1.types.Overlay.AnimationEnd( - start_time_offset=duration.Duration(seconds=10,), + start_time_offset=duration.Duration( + seconds=10, + ), ), ), ], @@ -114,7 +120,9 @@ def create_job_with_static_overlay( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", help="The location to start this job in.", default="us-central1", + "--location", + help="The location to start this job in.", + default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py index d924472..3cc8e6a 100644 --- a/samples/snippets/job_test.py +++ b/samples/snippets/job_test.py @@ -271,7 +271,10 @@ def test_create_job_with_animated_overlay(capsys, test_bucket): def test_create_job_with_set_number_spritesheet(capsys, test_bucket): create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( - project_id, location, input_uri, output_uri_for_set_number_spritesheet, + project_id, + location, + input_uri, + output_uri_for_set_number_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -319,7 +322,10 @@ def test_create_job_with_set_number_spritesheet(capsys, test_bucket): def test_create_job_with_periodic_spritesheet(capsys, test_bucket): 
create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( - project_id, location, input_uri, output_uri_for_periodic_spritesheet, + project_id, + location, + input_uri, + output_uri_for_periodic_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -407,7 +413,11 @@ def test_create_job_with_concatenated_inputs(capsys, test_bucket): def test_create_job_with_embedded_captions(capsys, test_bucket): create_job_with_embedded_captions.create_job_with_embedded_captions( - project_id, location, input_uri, captions_uri, output_uri_for_embedded_captions, + project_id, + location, + input_uri, + captions_uri, + output_uri_for_embedded_captions, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py index 85f5836..3b3ffa5 100644 --- a/samples/snippets/noxfile.py +++ b/samples/snippets/noxfile.py @@ -22,14 +22,14 @@ import nox - # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING # DO NOT EDIT THIS FILE EVER! # WARNING - WARNING - WARNING - WARNING - WARNING # WARNING - WARNING - WARNING - WARNING - WARNING -BLACK_VERSION = "black==19.10b0" +BLACK_VERSION = "black==22.3.0" +ISORT_VERSION = "isort==5.10.1" # Copy `noxfile_config.py` to your directory and modify it instead. @@ -168,12 +168,33 @@ def lint(session: nox.sessions.Session) -> None: @nox.session def blacken(session: nox.sessions.Session) -> None: + """Run black. Format code to uniform standard.""" session.install(BLACK_VERSION) python_files = [path for path in os.listdir(".") if path.endswith(".py")] session.run("black", *python_files) +# +# format = isort + black +# + + +@nox.session +def format(session: nox.sessions.Session) -> None: + """ + Run isort to sort imports. Then run black + to format code to uniform standard. 
+ """ + session.install(BLACK_VERSION, ISORT_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + # Use the --fss option to sort imports using strict alphabetical order. + # See https://pycqa.github.io/isort/docs/configuration/options.html#force-sort-within-sections + session.run("isort", "--fss", *python_files) + session.run("black", *python_files) + + # # Sample Tests # @@ -253,7 +274,7 @@ def py(session: nox.sessions.Session) -> None: def _get_repo_root() -> Optional[str]: - """ Returns the root folder of the project. """ + """Returns the root folder of the project.""" # Get root of this repository. Assume we don't have directories nested deeper than 10 items. p = Path(os.getcwd()) for i in range(10): diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt index bc29a49..abd7bd0 100644 --- a/samples/snippets/requirements-test.txt +++ b/samples/snippets/requirements-test.txt @@ -1,3 +1,3 @@ -backoff==1.11.1 -google-cloud-storage==2.1.0 -pytest==7.0.1 +backoff==2.0.1 +google-cloud-storage==2.3.0 +pytest==7.1.2 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt index 9d4974e..6bd06b8 100644 --- a/samples/snippets/requirements.txt +++ b/samples/snippets/requirements.txt @@ -1,3 +1,3 @@ -google-api-python-client==2.39.0 -grpcio==1.44.0 -google-cloud-video-transcoder==1.3.0 +google-api-python-client==2.49.0 +grpcio==1.46.3 +google-cloud-video-transcoder==1.3.1 diff --git a/scripts/readme-gen/readme_gen.py b/scripts/readme-gen/readme_gen.py index d309d6e..91b5967 100644 --- a/scripts/readme-gen/readme_gen.py +++ b/scripts/readme-gen/readme_gen.py @@ -28,7 +28,10 @@ jinja_env = jinja2.Environment( trim_blocks=True, loader=jinja2.FileSystemLoader( - os.path.abspath(os.path.join(os.path.dirname(__file__), 'templates')))) + os.path.abspath(os.path.join(os.path.dirname(__file__), "templates")) + ), + autoescape=True, +) README_TMPL = 
jinja_env.get_template('README.tmpl.rst') diff --git a/setup.py b/setup.py index 8724b25..11e0641 100644 --- a/setup.py +++ b/setup.py @@ -17,9 +17,10 @@ import io import os + import setuptools # type: ignore -version = "1.3.1" +version = "1.3.2" package_root = os.path.abspath(os.path.dirname(__file__)) @@ -45,7 +46,8 @@ # Until this issue is closed # https://github.com/googleapis/google-cloud-python/issues/10566 "google-api-core[grpc] >= 1.31.5, <3.0.0dev,!=2.0.*,!=2.1.*,!=2.2.*,!=2.3.0", - "proto-plus >= 1.20.3", + "proto-plus >= 1.20.3, <2.0.0dev", + "protobuf >= 3.19.0, <4.0.0dev", ), python_requires=">=3.6", classifiers=[ diff --git a/testing/constraints-3.6.txt b/testing/constraints-3.6.txt index 523c46f..13651c0 100644 --- a/testing/constraints-3.6.txt +++ b/testing/constraints-3.6.txt @@ -7,3 +7,4 @@ # Then this file should have foo==1.14.0 google-api-core==1.31.5 proto-plus==1.20.3 +protobuf==3.19.0 diff --git a/testing/constraints-3.7.txt b/testing/constraints-3.7.txt index e69de29..13651c0 100644 --- a/testing/constraints-3.7.txt +++ b/testing/constraints-3.7.txt @@ -0,0 +1,10 @@ +# This constraints file is used to check that lower bounds +# are correct in setup.py +# List *all* library dependencies and extras in this file. +# Pin the version to the lower bound. +# +# e.g., if setup.py has "foo >= 1.14.0, < 2.0.0dev", +# Then this file should have foo==1.14.0 +google-api-core==1.31.5 +proto-plus==1.20.3 +protobuf==3.19.0 diff --git a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py index 522a05e..897f6c2 100644 --- a/tests/unit/gapic/transcoder_v1/test_transcoder_service.py +++ b/tests/unit/gapic/transcoder_v1/test_transcoder_service.py @@ -14,39 +14,39 @@ # limitations under the License. 
# import os -import mock -import grpc -from grpc.experimental import aio -import math -import pytest -from proto.marshal.rules.dates import DurationRule, TimestampRule +# try/except added for compatibility with python < 3.8 +try: + from unittest import mock + from unittest.mock import AsyncMock +except ImportError: + import mock +import math +from google.api_core import gapic_v1, grpc_helpers, grpc_helpers_async, path_template from google.api_core import client_options from google.api_core import exceptions as core_exceptions -from google.api_core import gapic_v1 -from google.api_core import grpc_helpers -from google.api_core import grpc_helpers_async -from google.api_core import path_template +import google.auth from google.auth import credentials as ga_credentials from google.auth.exceptions import MutualTLSChannelError -from google.cloud.video.transcoder_v1.services.transcoder_service import ( - TranscoderServiceAsyncClient, -) -from google.cloud.video.transcoder_v1.services.transcoder_service import ( - TranscoderServiceClient, -) -from google.cloud.video.transcoder_v1.services.transcoder_service import pagers -from google.cloud.video.transcoder_v1.services.transcoder_service import transports -from google.cloud.video.transcoder_v1.types import resources -from google.cloud.video.transcoder_v1.types import services from google.oauth2 import service_account from google.protobuf import any_pb2 # type: ignore from google.protobuf import duration_pb2 # type: ignore from google.protobuf import timestamp_pb2 # type: ignore from google.rpc import status_pb2 # type: ignore -import google.auth +import grpc +from grpc.experimental import aio +from proto.marshal.rules.dates import DurationRule, TimestampRule +import pytest + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceAsyncClient, + TranscoderServiceClient, + pagers, + transports, +) +from google.cloud.video.transcoder_v1.types import resources, services def 
client_cert_source_callback(): @@ -95,20 +95,26 @@ def test__get_default_mtls_endpoint(): @pytest.mark.parametrize( - "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,] + "client_class,transport_name", + [ + (TranscoderServiceClient, "grpc"), + (TranscoderServiceAsyncClient, "grpc_asyncio"), + ], ) -def test_transcoder_service_client_from_service_account_info(client_class): +def test_transcoder_service_client_from_service_account_info( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_info" ) as factory: factory.return_value = creds info = {"valid": True} - client = client_class.from_service_account_info(info) + client = client_class.from_service_account_info(info, transport=transport_name) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "transcoder.googleapis.com:443" + assert client.transport._host == ("transcoder.googleapis.com:443") @pytest.mark.parametrize( @@ -137,23 +143,33 @@ def test_transcoder_service_client_service_account_always_use_jwt( @pytest.mark.parametrize( - "client_class", [TranscoderServiceClient, TranscoderServiceAsyncClient,] + "client_class,transport_name", + [ + (TranscoderServiceClient, "grpc"), + (TranscoderServiceAsyncClient, "grpc_asyncio"), + ], ) -def test_transcoder_service_client_from_service_account_file(client_class): +def test_transcoder_service_client_from_service_account_file( + client_class, transport_name +): creds = ga_credentials.AnonymousCredentials() with mock.patch.object( service_account.Credentials, "from_service_account_file" ) as factory: factory.return_value = creds - client = client_class.from_service_account_file("dummy/file/path.json") + client = client_class.from_service_account_file( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, 
client_class) - client = client_class.from_service_account_json("dummy/file/path.json") + client = client_class.from_service_account_json( + "dummy/file/path.json", transport=transport_name + ) assert client.transport._credentials == creds assert isinstance(client, client_class) - assert client.transport._host == "transcoder.googleapis.com:443" + assert client.transport._host == ("transcoder.googleapis.com:443") def test_transcoder_service_client_get_transport_class(): @@ -511,7 +527,9 @@ def test_transcoder_service_client_client_options_scopes( client_class, transport_class, transport_name ): # Check the case scopes are provided. - options = client_options.ClientOptions(scopes=["1", "2"],) + options = client_options.ClientOptions( + scopes=["1", "2"], + ) with mock.patch.object(transport_class, "__init__") as patched: patched.return_value = None client = client_class(client_options=options, transport=transport_name) @@ -651,10 +669,17 @@ def test_transcoder_service_client_create_channel_credentials_file( ) -@pytest.mark.parametrize("request_type", [services.CreateJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.CreateJobRequest, + dict, + ], +) def test_create_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -692,7 +717,8 @@ def test_create_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -708,7 +734,8 @@ async def test_create_job_async( transport: str = "grpc_asyncio", request_type=services.CreateJobRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -749,13 +776,15 @@ async def test_create_job_async_from_dict(): def test_create_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.CreateJobRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_job), "__call__") as call: @@ -769,7 +798,10 @@ def test_create_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -782,7 +814,7 @@ async def test_create_job_field_headers_async(): # a field header. Set these to a non-empty value. request = services.CreateJobRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_job), "__call__") as call: @@ -796,11 +828,16 @@ async def test_create_job_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.create_job), "__call__") as call: @@ -809,7 +846,8 @@ def test_create_job_flattened(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. client.create_job( - parent="parent_value", job=resources.Job(name="name_value"), + parent="parent_value", + job=resources.Job(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -825,7 +863,9 @@ def test_create_job_flattened(): def test_create_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. @@ -852,7 +892,8 @@ async def test_create_job_flattened_async(): # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
response = await client.create_job( - parent="parent_value", job=resources.Job(name="name_value"), + parent="parent_value", + job=resources.Job(name="name_value"), ) # Establish that the underlying call was made with the expected @@ -883,10 +924,17 @@ async def test_create_job_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [services.ListJobsRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.ListJobsRequest, + dict, + ], +) def test_list_jobs(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -897,7 +945,8 @@ def test_list_jobs(request_type, transport: str = "grpc"): with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = services.ListJobsResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_jobs(request) @@ -916,7 +965,8 @@ def test_list_jobs_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -932,7 +982,8 @@ async def test_list_jobs_async( transport: str = "grpc_asyncio", request_type=services.ListJobsRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -967,13 +1018,15 @@ async def test_list_jobs_async_from_dict(): def test_list_jobs_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.ListJobsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -987,7 +1040,10 @@ def test_list_jobs_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1000,7 +1056,7 @@ async def test_list_jobs_field_headers_async(): # a field header. Set these to a non-empty value. request = services.ListJobsRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1016,11 +1072,16 @@ async def test_list_jobs_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_jobs_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.list_jobs), "__call__") as call: @@ -1028,7 +1089,9 @@ def test_list_jobs_flattened(): call.return_value = services.ListJobsResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_jobs(parent="parent_value",) + client.list_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1040,13 +1103,16 @@ def test_list_jobs_flattened(): def test_list_jobs_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_jobs( - services.ListJobsRequest(), parent="parent_value", + services.ListJobsRequest(), + parent="parent_value", ) @@ -1066,7 +1132,9 @@ async def test_list_jobs_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_jobs(parent="parent_value",) + response = await client.list_jobs( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1087,13 +1155,15 @@ async def test_list_jobs_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.list_jobs( - services.ListJobsRequest(), parent="parent_value", + services.ListJobsRequest(), + parent="parent_value", ) def test_list_jobs_pager(transport_name: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1101,12 +1171,29 @@ def test_list_jobs_pager(transport_name: str = "grpc"): # Set the response to a series of pages. call.side_effect = ( services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], next_page_token="abc", ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), + services.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token="ghi", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), RuntimeError, ) @@ -1118,14 +1205,15 @@ def test_list_jobs_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, resources.Job) for i in results) def test_list_jobs_pages(transport_name: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1133,12 +1221,29 @@ def test_list_jobs_pages(transport_name: str = "grpc"): # Set the response to a series of pages. 
call.side_effect = ( services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], next_page_token="abc", ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), + services.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token="ghi", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), RuntimeError, ) pages = list(client.list_jobs(request={}).pages) @@ -1159,18 +1264,37 @@ async def test_list_jobs_async_pager(): # Set the response to a series of pages. call.side_effect = ( services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], next_page_token="abc", ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), + services.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token="ghi", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), RuntimeError, ) - async_pager = await client.list_jobs(request={},) + async_pager = await client.list_jobs( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -1190,25 +1314,51 @@ async def test_list_jobs_async_pages(): # Set the response to a series of pages. 
call.side_effect = ( services.ListJobsResponse( - jobs=[resources.Job(), resources.Job(), resources.Job(),], + jobs=[ + resources.Job(), + resources.Job(), + resources.Job(), + ], next_page_token="abc", ), - services.ListJobsResponse(jobs=[], next_page_token="def",), - services.ListJobsResponse(jobs=[resources.Job(),], next_page_token="ghi",), - services.ListJobsResponse(jobs=[resources.Job(), resources.Job(),],), + services.ListJobsResponse( + jobs=[], + next_page_token="def", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + ], + next_page_token="ghi", + ), + services.ListJobsResponse( + jobs=[ + resources.Job(), + resources.Job(), + ], + ), RuntimeError, ) pages = [] - async for page_ in (await client.list_jobs(request={})).pages: + async for page_ in ( + await client.list_jobs(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [services.GetJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.GetJobRequest, + dict, + ], +) def test_get_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1246,7 +1396,8 @@ def test_get_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1262,7 +1413,8 @@ async def test_get_job_async( transport: str = "grpc_asyncio", request_type=services.GetJobRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1303,13 +1455,15 @@ async def test_get_job_async_from_dict(): def test_get_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.GetJobRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1323,7 +1477,10 @@ def test_get_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1336,7 +1493,7 @@ async def test_get_job_field_headers_async(): # a field header. Set these to a non-empty value. request = services.GetJobRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1350,11 +1507,16 @@ async def test_get_job_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job), "__call__") as call: @@ -1362,7 +1524,9 @@ def test_get_job_flattened(): call.return_value = resources.Job() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job(name="name_value",) + client.get_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1374,13 +1538,16 @@ def test_get_job_flattened(): def test_get_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_job( - services.GetJobRequest(), name="name_value", + services.GetJobRequest(), + name="name_value", ) @@ -1398,7 +1565,9 @@ async def test_get_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job()) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_job(name="name_value",) + response = await client.get_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1419,14 +1588,22 @@ async def test_get_job_flattened_error_async(): # fields is an error. 
with pytest.raises(ValueError): await client.get_job( - services.GetJobRequest(), name="name_value", + services.GetJobRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [services.DeleteJobRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.DeleteJobRequest, + dict, + ], +) def test_delete_job(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1452,7 +1629,8 @@ def test_delete_job_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1468,7 +1646,8 @@ async def test_delete_job_async( transport: str = "grpc_asyncio", request_type=services.DeleteJobRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1496,13 +1675,15 @@ async def test_delete_job_async_from_dict(): def test_delete_job_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = services.DeleteJobRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1516,7 +1697,10 @@ def test_delete_job_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1529,7 +1713,7 @@ async def test_delete_job_field_headers_async(): # a field header. Set these to a non-empty value. request = services.DeleteJobRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1543,11 +1727,16 @@ async def test_delete_job_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_job_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.delete_job), "__call__") as call: @@ -1555,7 +1744,9 @@ def test_delete_job_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_job(name="name_value",) + client.delete_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -1567,13 +1758,16 @@ def test_delete_job_flattened(): def test_delete_job_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_job( - services.DeleteJobRequest(), name="name_value", + services.DeleteJobRequest(), + name="name_value", ) @@ -1591,7 +1785,9 @@ async def test_delete_job_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.delete_job(name="name_value",) + response = await client.delete_job( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -1612,14 +1808,22 @@ async def test_delete_job_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_job( - services.DeleteJobRequest(), name="name_value", + services.DeleteJobRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [services.CreateJobTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.CreateJobTemplateRequest, + dict, + ], +) def test_create_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1631,7 +1835,9 @@ def test_create_job_template(request_type, transport: str = "grpc"): type(client.transport.create_job_template), "__call__" ) as call: # Designate an appropriate return value for the call. 
- call.return_value = resources.JobTemplate(name="name_value",) + call.return_value = resources.JobTemplate( + name="name_value", + ) response = client.create_job_template(request) # Establish that the underlying gRPC stub method was called. @@ -1648,7 +1854,8 @@ def test_create_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -1666,7 +1873,8 @@ async def test_create_job_template_async( transport: str = "grpc_asyncio", request_type=services.CreateJobTemplateRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1679,7 +1887,9 @@ async def test_create_job_template_async( ) as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate(name="name_value",) + resources.JobTemplate( + name="name_value", + ) ) response = await client.create_job_template(request) @@ -1699,13 +1909,15 @@ async def test_create_job_template_async_from_dict(): def test_create_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. 
request = services.CreateJobTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1721,7 +1933,10 @@ def test_create_job_template_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1734,7 +1949,7 @@ async def test_create_job_template_field_headers_async(): # a field header. Set these to a non-empty value. request = services.CreateJobTemplateRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1752,11 +1967,16 @@ async def test_create_job_template_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_create_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1788,7 +2008,9 @@ def test_create_job_template_flattened(): def test_create_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. 
@@ -1857,10 +2079,17 @@ async def test_create_job_template_flattened_error_async(): ) -@pytest.mark.parametrize("request_type", [services.ListJobTemplatesRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.ListJobTemplatesRequest, + dict, + ], +) def test_list_job_templates(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1873,7 +2102,8 @@ def test_list_job_templates(request_type, transport: str = "grpc"): ) as call: # Designate an appropriate return value for the call. call.return_value = services.ListJobTemplatesResponse( - next_page_token="next_page_token_value", unreachable=["unreachable_value"], + next_page_token="next_page_token_value", + unreachable=["unreachable_value"], ) response = client.list_job_templates(request) @@ -1892,7 +2122,8 @@ def test_list_job_templates_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -1910,7 +2141,8 @@ async def test_list_job_templates_async( transport: str = "grpc_asyncio", request_type=services.ListJobTemplatesRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -1947,13 +2179,15 @@ async def test_list_job_templates_async_from_dict(): def test_list_job_templates_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.ListJobTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -1969,7 +2203,10 @@ def test_list_job_templates_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -1982,7 +2219,7 @@ async def test_list_job_templates_field_headers_async(): # a field header. Set these to a non-empty value. request = services.ListJobTemplatesRequest() - request.parent = "parent/value" + request.parent = "parent_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2000,11 +2237,16 @@ async def test_list_job_templates_field_headers_async(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "parent=parent/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "parent=parent_value", + ) in kw["metadata"] def test_list_job_templates_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2014,7 +2256,9 @@ def test_list_job_templates_flattened(): call.return_value = services.ListJobTemplatesResponse() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.list_job_templates(parent="parent_value",) + client.list_job_templates( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2026,13 +2270,16 @@ def test_list_job_templates_flattened(): def test_list_job_templates_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.list_job_templates( - services.ListJobTemplatesRequest(), parent="parent_value", + services.ListJobTemplatesRequest(), + parent="parent_value", ) @@ -2054,7 +2301,9 @@ async def test_list_job_templates_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.list_job_templates(parent="parent_value",) + response = await client.list_job_templates( + parent="parent_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -2075,13 +2324,15 @@ async def test_list_job_templates_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.list_job_templates( - services.ListJobTemplatesRequest(), parent="parent_value", + services.ListJobTemplatesRequest(), + parent="parent_value", ) def test_list_job_templates_pager(transport_name: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2098,12 +2349,21 @@ def test_list_job_templates_pager(transport_name: str = "grpc"): ], next_page_token="abc", ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", + job_templates=[], + next_page_token="def", + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token="ghi", ), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], ), RuntimeError, ) @@ -2116,14 +2376,15 @@ def test_list_job_templates_pager(transport_name: str = "grpc"): assert pager._metadata == metadata - results = [i for i in pager] + results = list(pager) assert len(results) == 6 assert all(isinstance(i, resources.JobTemplate) for i in results) def test_list_job_templates_pages(transport_name: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials, transport=transport_name, + credentials=ga_credentials.AnonymousCredentials, + transport=transport_name, ) # Mock the actual call within the gRPC stub, and fake the request. 
@@ -2140,12 +2401,21 @@ def test_list_job_templates_pages(transport_name: str = "grpc"): ], next_page_token="abc", ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", + job_templates=[], + next_page_token="def", + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + ], + next_page_token="ghi", ), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], ), RuntimeError, ) @@ -2176,19 +2446,30 @@ async def test_list_job_templates_async_pager(): ], next_page_token="abc", ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", + job_templates=[], + next_page_token="def", ), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], + job_templates=[ + resources.JobTemplate(), + ], + next_page_token="ghi", + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], ), RuntimeError, ) - async_pager = await client.list_job_templates(request={},) + async_pager = await client.list_job_templates( + request={}, + ) assert async_pager.next_page_token == "abc" responses = [] - async for response in async_pager: + async for response in async_pager: # pragma: no branch responses.append(response) assert len(responses) == 6 @@ -2217,26 +2498,44 @@ async def test_list_job_templates_async_pages(): ], next_page_token="abc", ), - services.ListJobTemplatesResponse(job_templates=[], next_page_token="def",), services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(),], next_page_token="ghi", + job_templates=[], + next_page_token="def", ), 
services.ListJobTemplatesResponse( - job_templates=[resources.JobTemplate(), resources.JobTemplate(),], + job_templates=[ + resources.JobTemplate(), + ], + next_page_token="ghi", + ), + services.ListJobTemplatesResponse( + job_templates=[ + resources.JobTemplate(), + resources.JobTemplate(), + ], ), RuntimeError, ) pages = [] - async for page_ in (await client.list_job_templates(request={})).pages: + async for page_ in ( + await client.list_job_templates(request={}) + ).pages: # pragma: no branch pages.append(page_) for page_, token in zip(pages, ["abc", "def", "ghi", ""]): assert page_.raw_page.next_page_token == token -@pytest.mark.parametrize("request_type", [services.GetJobTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.GetJobTemplateRequest, + dict, + ], +) def test_get_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2246,7 +2545,9 @@ def test_get_job_template(request_type, transport: str = "grpc"): # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: # Designate an appropriate return value for the call. - call.return_value = resources.JobTemplate(name="name_value",) + call.return_value = resources.JobTemplate( + name="name_value", + ) response = client.get_job_template(request) # Establish that the underlying gRPC stub method was called. @@ -2263,7 +2564,8 @@ def test_get_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2279,7 +2581,8 @@ async def test_get_job_template_async( transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2290,7 +2593,9 @@ async def test_get_job_template_async( with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: # Designate an appropriate return value for the call. call.return_value = grpc_helpers_async.FakeUnaryUnaryCall( - resources.JobTemplate(name="name_value",) + resources.JobTemplate( + name="name_value", + ) ) response = await client.get_job_template(request) @@ -2310,13 +2615,15 @@ async def test_get_job_template_async_from_dict(): def test_get_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.GetJobTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: @@ -2330,7 +2637,10 @@ def test_get_job_template_field_headers(): # Establish that the field header was sent. 
_, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2343,7 +2653,7 @@ async def test_get_job_template_field_headers_async(): # a field header. Set these to a non-empty value. request = services.GetJobTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: @@ -2359,11 +2669,16 @@ async def test_get_job_template_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_get_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object(type(client.transport.get_job_template), "__call__") as call: @@ -2371,7 +2686,9 @@ def test_get_job_template_flattened(): call.return_value = resources.JobTemplate() # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.get_job_template(name="name_value",) + client.get_job_template( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. 
@@ -2383,13 +2700,16 @@ def test_get_job_template_flattened(): def test_get_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.get_job_template( - services.GetJobTemplateRequest(), name="name_value", + services.GetJobTemplateRequest(), + name="name_value", ) @@ -2409,7 +2729,9 @@ async def test_get_job_template_flattened_async(): ) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - response = await client.get_job_template(name="name_value",) + response = await client.get_job_template( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2430,14 +2752,22 @@ async def test_get_job_template_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.get_job_template( - services.GetJobTemplateRequest(), name="name_value", + services.GetJobTemplateRequest(), + name="name_value", ) -@pytest.mark.parametrize("request_type", [services.DeleteJobTemplateRequest, dict,]) +@pytest.mark.parametrize( + "request_type", + [ + services.DeleteJobTemplateRequest, + dict, + ], +) def test_delete_job_template(request_type, transport: str = "grpc"): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2465,7 +2795,8 @@ def test_delete_job_template_empty_call(): # This test is a coverage failsafe to make sure that totally empty calls, # i.e. request == None and no flattened fields passed, work. 
client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc", ) # Mock the actual call within the gRPC stub, and fake the request. @@ -2483,7 +2814,8 @@ async def test_delete_job_template_async( transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest ): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # Everything is optional in proto3 as far as the runtime is concerned, @@ -2513,13 +2845,15 @@ async def test_delete_job_template_async_from_dict(): def test_delete_job_template_field_headers(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Any value that is part of the HTTP/1.1 URI should be sent as # a field header. Set these to a non-empty value. request = services.DeleteJobTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2535,7 +2869,10 @@ def test_delete_job_template_field_headers(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] @pytest.mark.asyncio @@ -2548,7 +2885,7 @@ async def test_delete_job_template_field_headers_async(): # a field header. Set these to a non-empty value. request = services.DeleteJobTemplateRequest() - request.name = "name/value" + request.name = "name_value" # Mock the actual call within the gRPC stub, and fake the request. 
with mock.patch.object( @@ -2564,11 +2901,16 @@ async def test_delete_job_template_field_headers_async(): # Establish that the field header was sent. _, _, kw = call.mock_calls[0] - assert ("x-goog-request-params", "name=name/value",) in kw["metadata"] + assert ( + "x-goog-request-params", + "name=name_value", + ) in kw["metadata"] def test_delete_job_template_flattened(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Mock the actual call within the gRPC stub, and fake the request. with mock.patch.object( @@ -2578,7 +2920,9 @@ def test_delete_job_template_flattened(): call.return_value = None # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. - client.delete_job_template(name="name_value",) + client.delete_job_template( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2590,13 +2934,16 @@ def test_delete_job_template_flattened(): def test_delete_job_template_flattened_error(): - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) # Attempting to call a method with both a request object and flattened # fields is an error. with pytest.raises(ValueError): client.delete_job_template( - services.DeleteJobTemplateRequest(), name="name_value", + services.DeleteJobTemplateRequest(), + name="name_value", ) @@ -2616,7 +2963,9 @@ async def test_delete_job_template_flattened_async(): call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None) # Call the method with a truthy value for each flattened field, # using the keyword arguments to the method. 
- response = await client.delete_job_template(name="name_value",) + response = await client.delete_job_template( + name="name_value", + ) # Establish that the underlying call was made with the expected # request object values. @@ -2637,7 +2986,8 @@ async def test_delete_job_template_flattened_error_async(): # fields is an error. with pytest.raises(ValueError): await client.delete_job_template( - services.DeleteJobTemplateRequest(), name="name_value", + services.DeleteJobTemplateRequest(), + name="name_value", ) @@ -2648,7 +2998,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), transport=transport, + credentials=ga_credentials.AnonymousCredentials(), + transport=transport, ) # It is an error to provide a credentials file and a transport instance. @@ -2668,7 +3019,10 @@ def test_credentials_transport_error(): options = client_options.ClientOptions() options.api_key = "api_key" with pytest.raises(ValueError): - client = TranscoderServiceClient(client_options=options, transport=transport,) + client = TranscoderServiceClient( + client_options=options, + transport=transport, + ) # It is an error to provide an api_key and a credential. 
options = mock.Mock() @@ -2684,7 +3038,8 @@ def test_credentials_transport_error(): ) with pytest.raises(ValueError): client = TranscoderServiceClient( - client_options={"scopes": ["1", "2"]}, transport=transport, + client_options={"scopes": ["1", "2"]}, + transport=transport, ) @@ -2727,10 +3082,28 @@ def test_transport_adc(transport_class): adc.assert_called_once() +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + ], +) +def test_transport_kind(transport_name): + transport = TranscoderServiceClient.get_transport_class(transport_name)( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert transport.kind == transport_name + + def test_transport_grpc_default(): # A client should use the gRPC transport by default. - client = TranscoderServiceClient(credentials=ga_credentials.AnonymousCredentials(),) - assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport,) + client = TranscoderServiceClient( + credentials=ga_credentials.AnonymousCredentials(), + ) + assert isinstance( + client.transport, + transports.TranscoderServiceGrpcTransport, + ) def test_transcoder_service_base_transport_error(): @@ -2771,6 +3144,14 @@ def test_transcoder_service_base_transport(): with pytest.raises(NotImplementedError): transport.close() + # Catch all for all remaining methods and properties + remainder = [ + "kind", + ] + for r in remainder: + with pytest.raises(NotImplementedError): + getattr(transport, r)() + def test_transcoder_service_base_transport_with_credentials_file(): # Instantiate the base transport with a credentials file @@ -2782,7 +3163,8 @@ def test_transcoder_service_base_transport_with_credentials_file(): Transport.return_value = None load_creds.return_value = (ga_credentials.AnonymousCredentials(), None) transport = transports.TranscoderServiceTransport( - credentials_file="credentials.json", quota_project_id="octopus", + credentials_file="credentials.json", + quota_project_id="octopus", ) load_creds.assert_called_once_with( 
"credentials.json", @@ -2915,24 +3297,40 @@ def test_transcoder_service_grpc_transport_client_cert_source_for_mtls(transport ) -def test_transcoder_service_host_no_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_transcoder_service_host_no_port(transport_name): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="transcoder.googleapis.com" ), + transport=transport_name, ) - assert client.transport._host == "transcoder.googleapis.com:443" + assert client.transport._host == ("transcoder.googleapis.com:443") -def test_transcoder_service_host_with_port(): +@pytest.mark.parametrize( + "transport_name", + [ + "grpc", + "grpc_asyncio", + ], +) +def test_transcoder_service_host_with_port(transport_name): client = TranscoderServiceClient( credentials=ga_credentials.AnonymousCredentials(), client_options=client_options.ClientOptions( api_endpoint="transcoder.googleapis.com:8000" ), + transport=transport_name, ) - assert client.transport._host == "transcoder.googleapis.com:8000" + assert client.transport._host == ("transcoder.googleapis.com:8000") def test_transcoder_service_grpc_transport_channel(): @@ -2940,7 +3338,8 @@ def test_transcoder_service_grpc_transport_channel(): # Check that channel is used if provided. transport = transports.TranscoderServiceGrpcTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -2952,7 +3351,8 @@ def test_transcoder_service_grpc_asyncio_transport_channel(): # Check that channel is used if provided. 
transport = transports.TranscoderServiceGrpcAsyncIOTransport( - host="squid.clam.whelk", channel=channel, + host="squid.clam.whelk", + channel=channel, ) assert transport.grpc_channel == channel assert transport._host == "squid.clam.whelk:443" @@ -3064,7 +3464,9 @@ def test_job_path(): location = "clam" job = "whelk" expected = "projects/{project}/locations/{location}/jobs/{job}".format( - project=project, location=location, job=job, + project=project, + location=location, + job=job, ) actual = TranscoderServiceClient.job_path(project, location, job) assert expected == actual @@ -3087,8 +3489,12 @@ def test_job_template_path(): project = "cuttlefish" location = "mussel" job_template = "winkle" - expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format( - project=project, location=location, job_template=job_template, + expected = ( + "projects/{project}/locations/{location}/jobTemplates/{job_template}".format( + project=project, + location=location, + job_template=job_template, + ) ) actual = TranscoderServiceClient.job_template_path(project, location, job_template) assert expected == actual @@ -3129,7 +3535,9 @@ def test_parse_common_billing_account_path(): def test_common_folder_path(): folder = "whelk" - expected = "folders/{folder}".format(folder=folder,) + expected = "folders/{folder}".format( + folder=folder, + ) actual = TranscoderServiceClient.common_folder_path(folder) assert expected == actual @@ -3147,7 +3555,9 @@ def test_parse_common_folder_path(): def test_common_organization_path(): organization = "oyster" - expected = "organizations/{organization}".format(organization=organization,) + expected = "organizations/{organization}".format( + organization=organization, + ) actual = TranscoderServiceClient.common_organization_path(organization) assert expected == actual @@ -3165,7 +3575,9 @@ def test_parse_common_organization_path(): def test_common_project_path(): project = "cuttlefish" - expected = 
"projects/{project}".format(project=project,) + expected = "projects/{project}".format( + project=project, + ) actual = TranscoderServiceClient.common_project_path(project) assert expected == actual @@ -3185,7 +3597,8 @@ def test_common_location_path(): project = "winkle" location = "nautilus" expected = "projects/{project}/locations/{location}".format( - project=project, location=location, + project=project, + location=location, ) actual = TranscoderServiceClient.common_location_path(project, location) assert expected == actual @@ -3210,7 +3623,8 @@ def test_client_with_default_client_info(): transports.TranscoderServiceTransport, "_prep_wrapped_messages" ) as prep: client = TranscoderServiceClient( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3219,7 +3633,8 @@ def test_client_with_default_client_info(): ) as prep: transport_class = TranscoderServiceClient.get_transport_class() transport = transport_class( - credentials=ga_credentials.AnonymousCredentials(), client_info=client_info, + credentials=ga_credentials.AnonymousCredentials(), + client_info=client_info, ) prep.assert_called_once_with(client_info) @@ -3227,7 +3642,8 @@ def test_client_with_default_client_info(): @pytest.mark.asyncio async def test_transport_close_async(): client = TranscoderServiceAsyncClient( - credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio", + credentials=ga_credentials.AnonymousCredentials(), + transport="grpc_asyncio", ) with mock.patch.object( type(getattr(client.transport, "grpc_channel")), "close"