.+?)$", path)
+ return m.groupdict() if m else {}
+
def __init__(
self,
*,
- credentials: credentials.Credentials = None,
- transport: Union[str, TranscoderServiceTransport] = None,
- client_options: ClientOptions = None,
+ credentials: Optional[credentials.Credentials] = None,
+ transport: Union[str, TranscoderServiceTransport, None] = None,
+ client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the transcoder service client.
@@ -189,23 +260,26 @@ def __init__(
transport (Union[str, ~.TranscoderServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
- client_options (ClientOptions): Custom options for the client. It
- won't take effect if a ``transport`` instance is provided.
+ client_options (client_options_lib.ClientOptions): Custom options for the
+ client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
- default endpoint provided by the client. GOOGLE_API_USE_MTLS
+ default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
- use the default regular endpoint, this is the default value for
- the environment variable) and "auto" (auto switch to the default
- mTLS endpoint if client SSL credentials is present). However,
- the ``api_endpoint`` property takes precedence if provided.
- (2) The ``client_cert_source`` property is used to provide client
- SSL credentials for mutual TLS transport. If not provided, the
- default SSL credentials will be used if present.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ use the default regular endpoint) and "auto" (auto switch to the
+ default mTLS endpoint if client certificate is present, this is
+ the default value). However, the ``api_endpoint`` property takes
+ precedence if provided.
+ (2) If the GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
+ is "true", then the ``client_cert_source`` property can be used
+ to provide a client certificate for mutual TLS transport. If
+ not provided, the default SSL client certificate will be used if
+ present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
+ set, no client certificate will be used.
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -213,29 +287,47 @@ def __init__(
creation failed for any reason.
"""
if isinstance(client_options, dict):
- client_options = ClientOptions.from_dict(client_options)
+ client_options = client_options_lib.from_dict(client_options)
if client_options is None:
- client_options = ClientOptions.ClientOptions()
+ client_options = client_options_lib.ClientOptions()
+
+ # Create SSL credentials for mutual TLS if needed.
+ use_client_cert = bool(
+ util.strtobool(os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false"))
+ )
- if client_options.api_endpoint is None:
- use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS", "never")
+ ssl_credentials = None
+ is_mtls = False
+ if use_client_cert:
+ if client_options.client_cert_source:
+ import grpc # type: ignore
+
+ cert, key = client_options.client_cert_source()
+ ssl_credentials = grpc.ssl_channel_credentials(
+ certificate_chain=cert, private_key=key
+ )
+ is_mtls = True
+ else:
+ creds = SslCredentials()
+ is_mtls = creds.is_mtls
+ ssl_credentials = creds.ssl_credentials if is_mtls else None
+
+ # Figure out which api endpoint to use.
+ if client_options.api_endpoint is not None:
+ api_endpoint = client_options.api_endpoint
+ else:
+ use_mtls_env = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_mtls_env == "never":
- client_options.api_endpoint = self.DEFAULT_ENDPOINT
+ api_endpoint = self.DEFAULT_ENDPOINT
elif use_mtls_env == "always":
- client_options.api_endpoint = self.DEFAULT_MTLS_ENDPOINT
+ api_endpoint = self.DEFAULT_MTLS_ENDPOINT
elif use_mtls_env == "auto":
- has_client_cert_source = (
- client_options.client_cert_source is not None
- or mtls.has_default_client_cert_source()
- )
- client_options.api_endpoint = (
- self.DEFAULT_MTLS_ENDPOINT
- if has_client_cert_source
- else self.DEFAULT_ENDPOINT
+ api_endpoint = (
+ self.DEFAULT_MTLS_ENDPOINT if is_mtls else self.DEFAULT_ENDPOINT
)
else:
raise MutualTLSChannelError(
- "Unsupported GOOGLE_API_USE_MTLS value. Accepted values: never, auto, always"
+ "Unsupported GOOGLE_API_USE_MTLS_ENDPOINT value. Accepted values: never, auto, always"
)
# Save or instantiate the transport.
@@ -259,10 +351,9 @@ def __init__(
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
- host=client_options.api_endpoint,
+ host=api_endpoint,
scopes=client_options.scopes,
- api_mtls_endpoint=client_options.api_endpoint,
- client_cert_source=client_options.client_cert_source,
+ ssl_channel_credentials=ssl_credentials,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
)
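
For orientation, here is a minimal sketch (not part of the patch) of how a caller exercises the endpoint and certificate selection implemented above; the environment-variable values, certificate file names, and callback are illustrative assumptions only.

```python
import os

from google.api_core.client_options import ClientOptions
from google.cloud.video import transcoder_v1beta1


def client_cert_source():
    # Hypothetical callback returning (cert_bytes, key_bytes) in PEM format.
    with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
        return cert.read(), key.read()


# Opt in to client certificates. With GOOGLE_API_USE_MTLS_ENDPOINT left at its
# default of "auto", the client switches to the mTLS endpoint only when a
# certificate is actually available.
os.environ["GOOGLE_API_USE_CLIENT_CERTIFICATE"] = "true"
os.environ["GOOGLE_API_USE_MTLS_ENDPOINT"] = "auto"

client = transcoder_v1beta1.TranscoderServiceClient(
    client_options=ClientOptions(client_cert_source=client_cert_source)
)
```
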
@@ -602,7 +693,8 @@ def create_job_template(
resource name.
This value should be 4-63 characters, and valid
- characters are ``/[a-zA-Z0-9_-_]/``.
+ characters must match the regular expression
+ ``[a-zA-Z][a-zA-Z0-9_-]*``.
This corresponds to the ``job_template_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
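
As a hedged illustration of the corrected constraint, an ID such as "my-template-01" starts with a letter, uses only characters from ``[a-zA-Z0-9_-]``, and falls within the 4-63 character limit; the project, location, and (empty) template below are placeholders.

```python
from google.cloud.video import transcoder_v1beta1

client = transcoder_v1beta1.TranscoderServiceClient()

client.create_job_template(
    parent="projects/my-project/locations/us-central1",
    job_template=transcoder_v1beta1.JobTemplate(),  # minimal placeholder template
    job_template_id="my-template-01",
)
```
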
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
index f088718..dbb7824 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/base.py
@@ -19,7 +19,7 @@
import typing
import pkg_resources
-from google import auth
+from google import auth # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
index 1ba71d9..41c7569 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc.py
@@ -15,6 +15,7 @@
# limitations under the License.
#
+import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple
from google.api_core import grpc_helpers # type: ignore
@@ -23,7 +24,6 @@
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
-
import grpc # type: ignore
from google.cloud.video.transcoder_v1beta1.types import resources
@@ -64,6 +64,7 @@ def __init__(
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -84,20 +85,22 @@ def __init__(
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the gRPC channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
- client_info (google.api_core.gapic_v1.client_info.ClientInfo):
- The client info used to send a user-agent string along with
- API requests. If ``None``, then default info will be used.
- Generally, you only need to set this if you're developing
+ client_info (google.api_core.gapic_v1.client_info.ClientInfo):
+ The client info used to send a user-agent string along with
+ API requests. If ``None``, then default info will be used.
+ Generally, you only need to set this if you're developing
your own client library.
Raises:
@@ -106,6 +109,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -113,7 +118,13 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
@@ -144,6 +155,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create a new channel, since the caller did not provide one.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
self._stubs = {} # type: Dict[str, Callable]
@@ -204,19 +233,8 @@ def create_channel(
@property
def grpc_channel(self) -> grpc.Channel:
- """Create the channel designed to connect to this service.
-
- This property caches on the instance; repeated calls return
- the same channel.
+ """Return the channel designed to connect to this service.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
- # Return the channel from cache.
return self._grpc_channel
@property
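
A minimal sketch, assuming locally available certificate and key files, of passing the new ``ssl_channel_credentials`` argument directly to the transport in place of the deprecated ``api_mtls_endpoint``/``client_cert_source`` pair; most callers should keep using the client with ``client_options`` rather than building a transport by hand.

```python
import grpc
from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports

# Placeholder PEM material for the mutual TLS channel.
with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
    ssl_creds = grpc.ssl_channel_credentials(
        certificate_chain=cert.read(), private_key=key.read()
    )

transport = transports.TranscoderServiceGrpcTransport(
    host="transcoder.mtls.googleapis.com",
    credentials=ga_credentials.AnonymousCredentials(),  # illustrative only
    ssl_channel_credentials=ssl_creds,
)
```
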
diff --git a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
index eb97bc0..23a7485 100644
--- a/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
+++ b/google/cloud/video/transcoder_v1beta1/services/transcoder_service/transports/grpc_asyncio.py
@@ -15,10 +15,12 @@
# limitations under the License.
#
+import warnings
from typing import Awaitable, Callable, Dict, Optional, Sequence, Tuple
from google.api_core import gapic_v1 # type: ignore
from google.api_core import grpc_helpers_async # type: ignore
+from google import auth # type: ignore
from google.auth import credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
@@ -106,6 +108,7 @@ def __init__(
channel: aio.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
+ ssl_channel_credentials: grpc.ChannelCredentials = None,
quota_project_id=None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
@@ -127,14 +130,16 @@ def __init__(
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
- api_mtls_endpoint (Optional[str]): The mutual TLS endpoint. If
- provided, it overrides the ``host`` argument and tries to create
+ api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
+ If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
- client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]): A
- callback to provide client SSL certificate bytes and private key
- bytes, both in PEM format. It is ignored if ``api_mtls_endpoint``
- is None.
+ client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
+ Deprecated. A callback to provide client SSL certificate bytes and
+ private key bytes, both in PEM format. It is ignored if
+ ``api_mtls_endpoint`` is None.
+ ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
+ for the gRPC channel. It is ignored if ``channel`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
@@ -149,6 +154,8 @@ def __init__(
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
+ self._ssl_channel_credentials = ssl_channel_credentials
+
if channel:
# Sanity check: Ensure that channel and credentials are not both
# provided.
@@ -156,13 +163,24 @@ def __init__(
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
+ self._ssl_channel_credentials = None
elif api_mtls_endpoint:
+ warnings.warn(
+ "api_mtls_endpoint and client_cert_source are deprecated",
+ DeprecationWarning,
+ )
+
host = (
api_mtls_endpoint
if ":" in api_mtls_endpoint
else api_mtls_endpoint + ":443"
)
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
@@ -182,6 +200,24 @@ def __init__(
scopes=scopes or self.AUTH_SCOPES,
quota_project_id=quota_project_id,
)
+ self._ssl_channel_credentials = ssl_credentials
+ else:
+ host = host if ":" in host else host + ":443"
+
+ if credentials is None:
+ credentials, _ = auth.default(
+ scopes=self.AUTH_SCOPES, quota_project_id=quota_project_id
+ )
+
+ # Create a new channel, since the caller did not provide one.
+ self._grpc_channel = type(self).create_channel(
+ host,
+ credentials=credentials,
+ credentials_file=credentials_file,
+ ssl_credentials=ssl_channel_credentials,
+ scopes=scopes or self.AUTH_SCOPES,
+ quota_project_id=quota_project_id,
+ )
# Run the base constructor.
super().__init__(
@@ -202,13 +238,6 @@ def grpc_channel(self) -> aio.Channel:
This property caches on the instance; repeated calls return
the same channel.
"""
- # Sanity check: Only create a new channel if we do not already
- # have one.
- if not hasattr(self, "_grpc_channel"):
- self._grpc_channel = self.create_channel(
- self._host, credentials=self._credentials,
- )
-
# Return the channel from cache.
return self._grpc_channel
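
The asyncio transport accepts the same argument; a parallel sketch (same placeholder certificate handling as above) would be:

```python
import grpc
from google.auth import credentials as ga_credentials
from google.cloud.video.transcoder_v1beta1.services.transcoder_service import transports

with open("client_cert.pem", "rb") as cert, open("client_key.pem", "rb") as key:
    ssl_creds = grpc.ssl_channel_credentials(
        certificate_chain=cert.read(), private_key=key.read()
    )

transport = transports.TranscoderServiceGrpcAsyncIOTransport(
    host="transcoder.mtls.googleapis.com",
    credentials=ga_credentials.AnonymousCredentials(),  # illustrative only
    ssl_channel_credentials=ssl_creds,
)
```
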
diff --git a/google/cloud/video/transcoder_v1beta1/types/resources.py b/google/cloud/video/transcoder_v1beta1/types/resources.py
index 0fd3180..886df3b 100644
--- a/google/cloud/video/transcoder_v1beta1/types/resources.py
+++ b/google/cloud/video/transcoder_v1beta1/types/resources.py
@@ -19,6 +19,7 @@
from google.protobuf import duration_pb2 as duration # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
__protobuf__ = proto.module(
@@ -99,6 +100,14 @@ class Job(proto.Message):
Output only. List of failure details. This property may
contain additional information about the failure when
``failure_reason`` is present.
+ create_time (~.timestamp.Timestamp):
+ Output only. The time the job was created.
+ start_time (~.timestamp.Timestamp):
+ Output only. The time the transcoding
+ started.
+ end_time (~.timestamp.Timestamp):
+ Output only. The time the transcoding
+ finished.
"""
class ProcessingState(proto.Enum):
@@ -152,6 +161,12 @@ class OriginUri(proto.Message):
proto.MESSAGE, number=11, message="FailureDetail",
)
+ create_time = proto.Field(proto.MESSAGE, number=12, message=timestamp.Timestamp,)
+
+ start_time = proto.Field(proto.MESSAGE, number=13, message=timestamp.Timestamp,)
+
+ end_time = proto.Field(proto.MESSAGE, number=14, message=timestamp.Timestamp,)
+
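
A hedged usage sketch of the new output-only timestamps (the job name is a placeholder; the fields are populated by the service as the job progresses):

```python
from google.cloud.video import transcoder_v1beta1

client = transcoder_v1beta1.TranscoderServiceClient()
job = client.get_job(
    name="projects/my-project/locations/us-central1/jobs/my-job"
)

print("created:", job.create_time)
print("started:", job.start_time)
print("finished:", job.end_time)
```
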
class JobTemplate(proto.Message):
r"""Transcoding job template resource.
@@ -387,10 +402,10 @@ class Manifest(proto.Message):
Attributes:
file_name (str):
- The name of the generated file. The default is ``"master"``
- with the extension suffix corresponding to the
- ``Manifest.type``.
- type (~.resources.Manifest.ManifestType):
+ The name of the generated file. The default is
+ ``"manifest"`` with the extension suffix corresponding to
+ the ``Manifest.type``.
+ type_ (~.resources.Manifest.ManifestType):
Required. Type of the manifest, can be "HLS"
or "DASH".
mux_streams (Sequence[str]):
@@ -410,7 +425,7 @@ class ManifestType(proto.Enum):
file_name = proto.Field(proto.STRING, number=1)
- type = proto.Field(proto.ENUM, number=2, enum=ManifestType,)
+ type_ = proto.Field(proto.ENUM, number=2, enum=ManifestType,)
mux_streams = proto.RepeatedField(proto.STRING, number=3)
@@ -432,7 +447,7 @@ class SpriteSheet(proto.Message):
r"""Sprite sheet configuration.
Attributes:
- format (str):
+ format_ (str):
Format type. The default is ``"jpeg"``.
Supported formats:
@@ -476,7 +491,7 @@ class SpriteSheet(proto.Message):
Specify the interval value in seconds.
"""
- format = proto.Field(proto.STRING, number=1)
+ format_ = proto.Field(proto.STRING, number=1)
file_prefix = proto.Field(proto.STRING, number=2)
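
A minimal sketch of the renamed fields; the trailing underscore keeps them from shadowing the ``type`` and ``format`` builtins, and the concrete values below are assumptions for illustration.

```python
from google.cloud.video import transcoder_v1beta1

manifest = transcoder_v1beta1.Manifest(
    file_name="manifest.m3u8",
    type_=transcoder_v1beta1.Manifest.ManifestType.MANIFEST_TYPE_HLS,
    mux_streams=["sd", "hd"],
)

sprite_sheet = transcoder_v1beta1.SpriteSheet(
    format_="jpeg",
    file_prefix="sprite",
    sprite_width_pixels=128,
    sprite_height_pixels=72,
)
```
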
@@ -811,11 +826,17 @@ class VideoStream(proto.Message):
Enforce specified codec preset. The default is
``"veryfast"``.
height_pixels (int):
- Required. The height of video in pixels. Must
- be an even integer.
+ The height of the video in pixels. Must be an
+ even integer. When not specified, the height is
+ adjusted to match the specified width and input
+ aspect ratio. If both are omitted, the input
+ height is used.
width_pixels (int):
- Required. The width of video in pixels. Must
- be an even integer.
+ The width of the video in pixels. Must be an
+ even integer. When not specified, the width is
+ adjusted to match the specified height and input
+ aspect ratio. If both are omitted, the input
+ width is used.
pixel_format (str):
Pixel format to use. The default is ``"yuv420p"``.
@@ -880,10 +901,34 @@ class VideoStream(proto.Message):
equal to zero. Must be less than
``VideoStream.gop_frame_count`` if set. The default is 0.
frame_rate (float):
- Required. The video frame rate in frames per
- second. Must be less than or equal to 120. Will
- default to the input frame rate if larger than
- the input frame rate.
+ Required. The target video frame rate in frames per second
+ (FPS). Must be less than or equal to 120. Will default to
+ the input frame rate if larger than the input frame rate.
+ The API will generate an output FPS that is divisible by the
+ input FPS, and smaller or equal to the target FPS.
+
+ The following table shows the computed video FPS given the
+ target FPS (in parenthesis) and input FPS (in the first
+ column):
+
+ ::
+
+ | | (30) | (60) | (25) | (50) |
+ |--------|--------|--------|------|------|
+ | 240 | Fail | Fail | Fail | Fail |
+ | 120 | 30 | 60 | 20 | 30 |
+ | 100 | 25 | 50 | 20 | 30 |
+ | 50 | 25 | 50 | 20 | 30 |
+ | 60 | 30 | 60 | 20 | 30 |
+ | 59.94 | 29.97 | 59.94 | 20 | 30 |
+ | 48 | 24 | 48 | 20 | 30 |
+ | 30 | 30 | 30 | 20 | 30 |
+ | 25 | 25 | 25 | 20 | 30 |
+ | 24 | 24 | 24 | 20 | 30 |
+ | 23.976 | 23.976 | 23.976 | 20 | 30 |
+ | 15 | 15 | 15 | 20 | 30 |
+ | 12 | 12 | 12 | 20 | 30 |
+ | 10 | 10 | 10 | 20 | 30 |
aq_strength (float):
Specify the intensity of the adaptive
quantizer (AQ). Must be between 0 and 1, where 0
diff --git a/google/cloud/video/transcoder_v1beta1/types/services.py b/google/cloud/video/transcoder_v1beta1/types/services.py
index 5de258d..7e27235 100644
--- a/google/cloud/video/transcoder_v1beta1/types/services.py
+++ b/google/cloud/video/transcoder_v1beta1/types/services.py
@@ -136,7 +136,8 @@ class CreateJobTemplateRequest(proto.Message):
name.
This value should be 4-63 characters, and valid characters
- are ``/[a-zA-Z0-9_-_]/``.
+ must match the regular expression
+ ``[a-zA-Z][a-zA-Z0-9_-]*``.
"""
parent = proto.Field(proto.STRING, number=1)
diff --git a/noxfile.py b/noxfile.py
index dfdc268..153daa5 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -72,7 +72,9 @@ def default(session):
# Install all test dependencies, then install this package in-place.
session.install("asyncmock", "pytest-asyncio")
- session.install("mock", "pytest", "pytest-cov")
+ session.install(
+ "mock", "pytest", "pytest-cov",
+ )
session.install("-e", ".")
# Run py.test against the unit tests.
@@ -173,7 +175,9 @@ def docfx(session):
"""Build the docfx yaml files for this library."""
session.install("-e", ".")
- session.install("sphinx", "alabaster", "recommonmark", "sphinx-docfx-yaml")
+ # sphinx-docfx-yaml supports up to sphinx version 1.5.5.
+ # https://github.com/docascode/sphinx-docfx-yaml/issues/97
+ session.install("sphinx==1.5.5", "alabaster", "recommonmark", "sphinx-docfx-yaml")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
diff --git a/scripts/decrypt-secrets.sh b/scripts/decrypt-secrets.sh
index ff599eb..21f6d2a 100755
--- a/scripts/decrypt-secrets.sh
+++ b/scripts/decrypt-secrets.sh
@@ -20,14 +20,27 @@ ROOT=$( dirname "$DIR" )
# Work from the project root.
cd $ROOT
+# Prevent it from overwriting files.
+# We recommend that sample authors use their own service account files and cloud project.
+# In that case, they are expected to prepare these files themselves.
+if [[ -f "testing/test-env.sh" ]] || \
+ [[ -f "testing/service-account.json" ]] || \
+ [[ -f "testing/client-secrets.json" ]]; then
+ echo "One or more target files exist, aborting."
+ exit 1
+fi
+
# Use SECRET_MANAGER_PROJECT if set, fallback to cloud-devrel-kokoro-resources.
PROJECT_ID="${SECRET_MANAGER_PROJECT:-cloud-devrel-kokoro-resources}"
gcloud secrets versions access latest --secret="python-docs-samples-test-env" \
+ --project="${PROJECT_ID}" \
> testing/test-env.sh
gcloud secrets versions access latest \
--secret="python-docs-samples-service-account" \
+ --project="${PROJECT_ID}" \
> testing/service-account.json
gcloud secrets versions access latest \
--secret="python-docs-samples-client-secrets" \
- > testing/client-secrets.json
\ No newline at end of file
+ --project="${PROJECT_ID}" \
+ > testing/client-secrets.json
diff --git a/setup.py b/setup.py
index 1599096..0318706 100644
--- a/setup.py
+++ b/setup.py
@@ -19,7 +19,7 @@
import os
import setuptools # type: ignore
-version = "0.1.0"
+version = "0.2.0"
package_root = os.path.abspath(os.path.dirname(__file__))
diff --git a/synth.metadata b/synth.metadata
index 45009e7..76cbf07 100644
--- a/synth.metadata
+++ b/synth.metadata
@@ -3,30 +3,22 @@
{
"git": {
"name": ".",
- "remote": "sso://devrel/cloud/libraries/python/python-video-transcoder",
- "sha": "d83d277c4513cf6c1caaf56ca3a6564cd5910c86"
- }
- },
- {
- "git": {
- "name": "googleapis",
- "remote": "https://github.com/googleapis/googleapis.git",
- "sha": "72eb54c45231d84266ca059473bc1793c394fcb2",
- "internalRef": "328059685"
+ "remote": "git@github.com:googleapis/python-video-transcoder",
+ "sha": "5a006f4972af69e2244ee71617d75c79212c9b97"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
},
{
"git": {
"name": "synthtool",
"remote": "https://github.com/googleapis/synthtool.git",
- "sha": "05de3e1e14a0b07eab8b474e669164dbd31f81fb"
+ "sha": "d5fc0bcf9ea9789c5b0e3154a9e3b29e5cea6116"
}
}
],
diff --git a/synth.py b/synth.py
index 76c55bf..ffa41ff 100644
--- a/synth.py
+++ b/synth.py
@@ -25,21 +25,23 @@
# ----------------------------------------------------------------------------
# Generate transcoder GAPIC layer
# ----------------------------------------------------------------------------
-library = gapic.py_library(
- service="transcoder",
- version="v1beta1",
- bazel_target="//google/cloud/video/transcoder/v1beta1:video-transcoder-v1beta1-py",
-)
-
-s.move(
- library,
- excludes=[
- "setup.py",
- "docs/index.rst",
- "noxfile.py",
- "scripts/fixup_transcoder_v1beta1_keywords.py",
- ],
-)
+versions = ["v1beta1"]
+for version in versions:
+ library = gapic.py_library(
+ service="transcoder",
+ version=version,
+ bazel_target=f"//google/cloud/video/transcoder/{version}:video-transcoder-{version}-py",
+ )
+
+ s.move(
+ library,
+ excludes=[
+ "setup.py",
+ "docs/index.rst",
+ "noxfile.py",
+ f"scripts/fixup_transcoder_{version}_keywords.py",
+ ],
+ )
# ----------------------------------------------------------------------------
# Add templated files
diff --git a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
index dc47558..33af7c6 100644
--- a/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
+++ b/tests/unit/gapic/transcoder_v1beta1/test_transcoder_service.py
@@ -44,6 +44,7 @@
from google.cloud.video.transcoder_v1beta1.types import services
from google.oauth2 import service_account
from google.protobuf import duration_pb2 as duration # type: ignore
+from google.protobuf import timestamp_pb2 as timestamp # type: ignore
def client_cert_source_callback():
@@ -101,12 +102,12 @@ def test_transcoder_service_client_from_service_account_file(client_class):
) as factory:
factory.return_value = creds
client = client_class.from_service_account_file("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
client = client_class.from_service_account_json("dummy/file/path.json")
- assert client._transport._credentials == creds
+ assert client.transport._credentials == creds
- assert client._transport._host == "transcoder.googleapis.com:443"
+ assert client.transport._host == "transcoder.googleapis.com:443"
def test_transcoder_service_client_get_transport_class():
@@ -162,15 +163,14 @@ def test_transcoder_service_client_client_options(
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "never".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "never"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -179,15 +179,14 @@ def test_transcoder_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS is
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
# "always".
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "always"}):
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
with mock.patch.object(transport_class, "__init__") as patched:
patched.return_value = None
client = client_class()
@@ -196,95 +195,185 @@ def test_transcoder_service_client_client_options(
credentials_file=None,
host=client.DEFAULT_MTLS_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
+ # unsupported value.
+ with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
+ with pytest.raises(MutualTLSChannelError):
+ client = client_class()
+
+ # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
+ ):
+ with pytest.raises(ValueError):
+ client = client_class()
+
+ # Check the case quota_project_id is provided
+ options = client_options.ClientOptions(quota_project_id="octopus")
+ with mock.patch.object(transport_class, "__init__") as patched:
+ patched.return_value = None
+ client = client_class(client_options=options)
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id="octopus",
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+
+@pytest.mark.parametrize(
+ "client_class,transport_class,transport_name,use_client_cert_env",
+ [
+ (
+ TranscoderServiceClient,
+ transports.TranscoderServiceGrpcTransport,
+ "grpc",
+ "true",
+ ),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "true",
+ ),
+ (
+ TranscoderServiceClient,
+ transports.TranscoderServiceGrpcTransport,
+ "grpc",
+ "false",
+ ),
+ (
+ TranscoderServiceAsyncClient,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ "grpc_asyncio",
+ "false",
+ ),
+ ],
+)
+@mock.patch.object(
+ TranscoderServiceClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceClient),
+)
+@mock.patch.object(
+ TranscoderServiceAsyncClient,
+ "DEFAULT_ENDPOINT",
+ modify_default_endpoint(TranscoderServiceAsyncClient),
+)
+@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
+def test_transcoder_service_client_mtls_env_auto(
+ client_class, transport_class, transport_name, use_client_cert_env
+):
+ # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
+ # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
+
+ # Check the case client_cert_source is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
options = client_options.ClientOptions(
client_cert_source=client_cert_source_callback
)
with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=client_cert_source_callback,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
-
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", and default_client_cert_source is provided.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
- with mock.patch.object(transport_class, "__init__") as patched:
+ ssl_channel_creds = mock.Mock()
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=True,
+ "grpc.ssl_channel_credentials", return_value=ssl_channel_creds
):
patched.return_value = None
- client = client_class()
+ client = client_class(client_options=options)
+
+ if use_client_cert_env == "false":
+ expected_ssl_channel_creds = None
+ expected_host = client.DEFAULT_ENDPOINT
+ else:
+ expected_ssl_channel_creds = ssl_channel_creds
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+
patched.assert_called_once_with(
credentials=None,
credentials_file=None,
- host=client.DEFAULT_MTLS_ENDPOINT,
+ host=expected_host,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_MTLS_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
- # Check the case api_endpoint is not provided, GOOGLE_API_USE_MTLS is
- # "auto", but client_cert_source and default_client_cert_source are None.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "auto"}):
+ # Check the case ADC client cert is provided. Whether client cert is used depends on
+ # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
with mock.patch.object(transport_class, "__init__") as patched:
with mock.patch(
- "google.auth.transport.mtls.has_default_client_cert_source",
- return_value=False,
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
):
- patched.return_value = None
- client = client_class()
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id=None,
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
-
- # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS has
- # unsupported value.
- with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS": "Unsupported"}):
- with pytest.raises(MutualTLSChannelError):
- client = client_class()
-
- # Check the case quota_project_id is provided
- options = client_options.ClientOptions(quota_project_id="octopus")
- with mock.patch.object(transport_class, "__init__") as patched:
- patched.return_value = None
- client = client_class(client_options=options)
- patched.assert_called_once_with(
- credentials=None,
- credentials_file=None,
- host=client.DEFAULT_ENDPOINT,
- scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
- quota_project_id="octopus",
- client_info=transports.base.DEFAULT_CLIENT_INFO,
- )
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.ssl_credentials",
+ new_callable=mock.PropertyMock,
+ ) as ssl_credentials_mock:
+ if use_client_cert_env == "false":
+ is_mtls_mock.return_value = False
+ ssl_credentials_mock.return_value = None
+ expected_host = client.DEFAULT_ENDPOINT
+ expected_ssl_channel_creds = None
+ else:
+ is_mtls_mock.return_value = True
+ ssl_credentials_mock.return_value = mock.Mock()
+ expected_host = client.DEFAULT_MTLS_ENDPOINT
+ expected_ssl_channel_creds = (
+ ssl_credentials_mock.return_value
+ )
+
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=expected_host,
+ scopes=None,
+ ssl_channel_credentials=expected_ssl_channel_creds,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
+
+ # Check the case client_cert_source and ADC client cert are not provided.
+ with mock.patch.dict(
+ os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
+ ):
+ with mock.patch.object(transport_class, "__init__") as patched:
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.__init__", return_value=None
+ ):
+ with mock.patch(
+ "google.auth.transport.grpc.SslCredentials.is_mtls",
+ new_callable=mock.PropertyMock,
+ ) as is_mtls_mock:
+ is_mtls_mock.return_value = False
+ patched.return_value = None
+ client = client_class()
+ patched.assert_called_once_with(
+ credentials=None,
+ credentials_file=None,
+ host=client.DEFAULT_ENDPOINT,
+ scopes=None,
+ ssl_channel_credentials=None,
+ quota_project_id=None,
+ client_info=transports.base.DEFAULT_CLIENT_INFO,
+ )
@pytest.mark.parametrize(
@@ -311,8 +400,7 @@ def test_transcoder_service_client_client_options_scopes(
credentials_file=None,
host=client.DEFAULT_ENDPOINT,
scopes=["1", "2"],
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -342,8 +430,7 @@ def test_transcoder_service_client_client_options_credentials_file(
credentials_file="credentials.json",
host=client.DEFAULT_ENDPOINT,
scopes=None,
- api_mtls_endpoint=client.DEFAULT_ENDPOINT,
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -362,8 +449,7 @@ def test_transcoder_service_client_client_options_from_dict():
credentials_file=None,
host="squid.clam.whelk",
scopes=None,
- api_mtls_endpoint="squid.clam.whelk",
- client_cert_source=None,
+ ssl_channel_credentials=None,
quota_project_id=None,
client_info=transports.base.DEFAULT_CLIENT_INFO,
)
@@ -379,7 +465,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job(
name="name_value",
@@ -400,6 +486,7 @@ def test_create_job(transport: str = "grpc", request_type=services.CreateJobRequ
assert args[0] == services.CreateJobRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, resources.Job)
assert response.name == "name_value"
@@ -420,19 +507,19 @@ def test_create_job_from_dict():
@pytest.mark.asyncio
-async def test_create_job_async(transport: str = "grpc_asyncio"):
+async def test_create_job_async(
+ transport: str = "grpc_asyncio", request_type=services.CreateJobRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.CreateJobRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
resources.Job(
@@ -451,7 +538,7 @@ async def test_create_job_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.CreateJobRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, resources.Job)
@@ -469,6 +556,11 @@ async def test_create_job_async(transport: str = "grpc_asyncio"):
assert response.failure_reason == "failure_reason_value"
+@pytest.mark.asyncio
+async def test_create_job_async_from_dict():
+ await test_create_job_async(request_type=dict)
+
+
def test_create_job_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -478,7 +570,7 @@ def test_create_job_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
call.return_value = resources.Job()
client.create_job(request)
@@ -505,9 +597,7 @@ async def test_create_job_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
await client.create_job(request)
@@ -526,7 +616,7 @@ def test_create_job_flattened():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.create_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job()
@@ -566,9 +656,7 @@ async def test_create_job_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.create_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.create_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job()
@@ -615,7 +703,7 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobsResponse(
next_page_token="next_page_token_value",
@@ -630,6 +718,7 @@ def test_list_jobs(transport: str = "grpc", request_type=services.ListJobsReques
assert args[0] == services.ListJobsRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListJobsPager)
assert response.next_page_token == "next_page_token_value"
@@ -640,19 +729,19 @@ def test_list_jobs_from_dict():
@pytest.mark.asyncio
-async def test_list_jobs_async(transport: str = "grpc_asyncio"):
+async def test_list_jobs_async(
+ transport: str = "grpc_asyncio", request_type=services.ListJobsRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.ListJobsRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_jobs), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
services.ListJobsResponse(next_page_token="next_page_token_value",)
@@ -664,7 +753,7 @@ async def test_list_jobs_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.ListJobsRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListJobsAsyncPager)
@@ -672,6 +761,11 @@ async def test_list_jobs_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_jobs_async_from_dict():
+ await test_list_jobs_async(request_type=dict)
+
+
def test_list_jobs_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -681,7 +775,7 @@ def test_list_jobs_field_headers():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
call.return_value = services.ListJobsResponse()
client.list_jobs(request)
@@ -708,9 +802,7 @@ async def test_list_jobs_field_headers_async():
request.parent = "parent/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_jobs), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
services.ListJobsResponse()
)
@@ -731,7 +823,7 @@ def test_list_jobs_flattened():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobsResponse()
@@ -765,9 +857,7 @@ async def test_list_jobs_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.list_jobs), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobsResponse()
@@ -804,7 +894,7 @@ def test_list_jobs_pager():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
services.ListJobsResponse(
@@ -834,7 +924,7 @@ def test_list_jobs_pages():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials,)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.list_jobs), "__call__") as call:
+ with mock.patch.object(type(client.transport.list_jobs), "__call__") as call:
# Set the response to a series of pages.
call.side_effect = (
services.ListJobsResponse(
@@ -847,8 +937,8 @@ def test_list_jobs_pages():
RuntimeError,
)
pages = list(client.list_jobs(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -857,9 +947,7 @@ async def test_list_jobs_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_jobs),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -888,9 +976,7 @@ async def test_list_jobs_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_jobs),
- "__call__",
- new_callable=mock.AsyncMock,
+ type(client.transport.list_jobs), "__call__", new_callable=mock.AsyncMock
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -904,10 +990,10 @@ async def test_list_jobs_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_jobs(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_jobs(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
@@ -920,7 +1006,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job(
name="name_value",
@@ -941,6 +1027,7 @@ def test_get_job(transport: str = "grpc", request_type=services.GetJobRequest):
assert args[0] == services.GetJobRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, resources.Job)
assert response.name == "name_value"
@@ -961,17 +1048,19 @@ def test_get_job_from_dict():
@pytest.mark.asyncio
-async def test_get_job_async(transport: str = "grpc_asyncio"):
+async def test_get_job_async(
+ transport: str = "grpc_asyncio", request_type=services.GetJobRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.GetJobRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
resources.Job(
@@ -990,7 +1079,7 @@ async def test_get_job_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.GetJobRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, resources.Job)
@@ -1008,6 +1097,11 @@ async def test_get_job_async(transport: str = "grpc_asyncio"):
assert response.failure_reason == "failure_reason_value"
+@pytest.mark.asyncio
+async def test_get_job_async_from_dict():
+ await test_get_job_async(request_type=dict)
+
+
def test_get_job_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1017,7 +1111,7 @@ def test_get_job_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
call.return_value = resources.Job()
client.get_job(request)
@@ -1044,7 +1138,7 @@ async def test_get_job_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Job())
await client.get_job(request)
@@ -1063,7 +1157,7 @@ def test_get_job_flattened():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job()
@@ -1097,7 +1191,7 @@ async def test_get_job_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._client._transport.get_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.get_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.Job()
@@ -1138,7 +1232,7 @@ def test_delete_job(transport: str = "grpc", request_type=services.DeleteJobRequ
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1159,19 +1253,19 @@ def test_delete_job_from_dict():
@pytest.mark.asyncio
-async def test_delete_job_async(transport: str = "grpc_asyncio"):
+async def test_delete_job_async(
+ transport: str = "grpc_asyncio", request_type=services.DeleteJobRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.DeleteJobRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -1181,12 +1275,17 @@ async def test_delete_job_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.DeleteJobRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_job_async_from_dict():
+ await test_delete_job_async(request_type=dict)
+
+
def test_delete_job_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1196,7 +1295,7 @@ def test_delete_job_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
call.return_value = None
client.delete_job(request)
@@ -1223,9 +1322,7 @@ async def test_delete_job_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
await client.delete_job(request)
@@ -1244,7 +1341,7 @@ def test_delete_job_flattened():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(type(client._transport.delete_job), "__call__") as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1278,9 +1375,7 @@ async def test_delete_job_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.delete_job), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.delete_job), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -1324,7 +1419,7 @@ def test_create_job_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate(name="name_value",)
@@ -1338,6 +1433,7 @@ def test_create_job_template(
assert args[0] == services.CreateJobTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, resources.JobTemplate)
assert response.name == "name_value"
@@ -1348,18 +1444,20 @@ def test_create_job_template_from_dict():
@pytest.mark.asyncio
-async def test_create_job_template_async(transport: str = "grpc_asyncio"):
+async def test_create_job_template_async(
+ transport: str = "grpc_asyncio", request_type=services.CreateJobTemplateRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.CreateJobTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1372,7 +1470,7 @@ async def test_create_job_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.CreateJobTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, resources.JobTemplate)
@@ -1380,6 +1478,11 @@ async def test_create_job_template_async(transport: str = "grpc_asyncio"):
assert response.name == "name_value"
+@pytest.mark.asyncio
+async def test_create_job_template_async_from_dict():
+ await test_create_job_template_async(request_type=dict)
+
+
def test_create_job_template_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1390,7 +1493,7 @@ def test_create_job_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
call.return_value = resources.JobTemplate()
@@ -1419,7 +1522,7 @@ async def test_create_job_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
resources.JobTemplate()
@@ -1442,7 +1545,7 @@ def test_create_job_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate()
@@ -1489,7 +1592,7 @@ async def test_create_job_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.create_job_template), "__call__"
+ type(client.transport.create_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate()
@@ -1547,7 +1650,7 @@ def test_list_job_templates(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobTemplatesResponse(
@@ -1563,6 +1666,7 @@ def test_list_job_templates(
assert args[0] == services.ListJobTemplatesRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, pagers.ListJobTemplatesPager)
assert response.next_page_token == "next_page_token_value"
@@ -1573,18 +1677,20 @@ def test_list_job_templates_from_dict():
@pytest.mark.asyncio
-async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
+async def test_list_job_templates_async(
+ transport: str = "grpc_asyncio", request_type=services.ListJobTemplatesRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.ListJobTemplatesRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
@@ -1597,7 +1703,7 @@ async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.ListJobTemplatesRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, pagers.ListJobTemplatesAsyncPager)
@@ -1605,6 +1711,11 @@ async def test_list_job_templates_async(transport: str = "grpc_asyncio"):
assert response.next_page_token == "next_page_token_value"
+@pytest.mark.asyncio
+async def test_list_job_templates_async_from_dict():
+ await test_list_job_templates_async(request_type=dict)
+
+
def test_list_job_templates_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1615,7 +1726,7 @@ def test_list_job_templates_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
call.return_value = services.ListJobTemplatesResponse()
@@ -1644,7 +1755,7 @@ async def test_list_job_templates_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
services.ListJobTemplatesResponse()
@@ -1667,7 +1778,7 @@ def test_list_job_templates_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobTemplatesResponse()
@@ -1703,7 +1814,7 @@ async def test_list_job_templates_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = services.ListJobTemplatesResponse()
@@ -1742,7 +1853,7 @@ def test_list_job_templates_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1782,7 +1893,7 @@ def test_list_job_templates_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.list_job_templates), "__call__"
+ type(client.transport.list_job_templates), "__call__"
) as call:
# Set the response to a series of pages.
call.side_effect = (
@@ -1804,8 +1915,8 @@ def test_list_job_templates_pages():
RuntimeError,
)
pages = list(client.list_job_templates(request={}).pages)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
@@ -1814,7 +1925,7 @@ async def test_list_job_templates_async_pager():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_job_templates),
+ type(client.transport.list_job_templates),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1853,7 +1964,7 @@ async def test_list_job_templates_async_pages():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.list_job_templates),
+ type(client.transport.list_job_templates),
"__call__",
new_callable=mock.AsyncMock,
) as call:
@@ -1877,10 +1988,10 @@ async def test_list_job_templates_async_pages():
RuntimeError,
)
pages = []
- async for page in (await client.list_job_templates(request={})).pages:
- pages.append(page)
- for page, token in zip(pages, ["abc", "def", "ghi", ""]):
- assert page.raw_page.next_page_token == token
+ async for page_ in (await client.list_job_templates(request={})).pages:
+ pages.append(page_)
+ for page_, token in zip(pages, ["abc", "def", "ghi", ""]):
+ assert page_.raw_page.next_page_token == token
def test_get_job_template(
@@ -1895,9 +2006,7 @@ def test_get_job_template(
request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate(name="name_value",)
@@ -1910,6 +2019,7 @@ def test_get_job_template(
assert args[0] == services.GetJobTemplateRequest()
# Establish that the response is the type that we expect.
+
assert isinstance(response, resources.JobTemplate)
assert response.name == "name_value"
@@ -1920,19 +2030,19 @@ def test_get_job_template_from_dict():
@pytest.mark.asyncio
-async def test_get_job_template_async(transport: str = "grpc_asyncio"):
+async def test_get_job_template_async(
+ transport: str = "grpc_asyncio", request_type=services.GetJobTemplateRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.GetJobTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
resources.JobTemplate(name="name_value",)
@@ -1944,7 +2054,7 @@ async def test_get_job_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.GetJobTemplateRequest()
# Establish that the response is the type that we expect.
assert isinstance(response, resources.JobTemplate)
@@ -1952,6 +2062,11 @@ async def test_get_job_template_async(transport: str = "grpc_asyncio"):
assert response.name == "name_value"
+@pytest.mark.asyncio
+async def test_get_job_template_async_from_dict():
+ await test_get_job_template_async(request_type=dict)
+
+
def test_get_job_template_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -1961,9 +2076,7 @@ def test_get_job_template_field_headers():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
call.return_value = resources.JobTemplate()
client.get_job_template(request)
@@ -1990,9 +2103,7 @@ async def test_get_job_template_field_headers_async():
request.name = "name/value"
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
resources.JobTemplate()
)
@@ -2013,9 +2124,7 @@ def test_get_job_template_flattened():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate()
@@ -2049,9 +2158,7 @@ async def test_get_job_template_flattened_async():
)
# Mock the actual call within the gRPC stub, and fake the request.
- with mock.patch.object(
- type(client._client._transport.get_job_template), "__call__"
- ) as call:
+ with mock.patch.object(type(client.transport.get_job_template), "__call__") as call:
# Designate an appropriate return value for the call.
call.return_value = resources.JobTemplate()
@@ -2097,7 +2204,7 @@ def test_delete_job_template(
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2119,18 +2226,20 @@ def test_delete_job_template_from_dict():
@pytest.mark.asyncio
-async def test_delete_job_template_async(transport: str = "grpc_asyncio"):
+async def test_delete_job_template_async(
+ transport: str = "grpc_asyncio", request_type=services.DeleteJobTemplateRequest
+):
client = TranscoderServiceAsyncClient(
credentials=credentials.AnonymousCredentials(), transport=transport,
)
# Everything is optional in proto3 as far as the runtime is concerned,
# and we are mocking out the actual API, so just send an empty request.
- request = services.DeleteJobTemplateRequest()
+ request = request_type()
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -2141,12 +2250,17 @@ async def test_delete_job_template_async(transport: str = "grpc_asyncio"):
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
- assert args[0] == request
+ assert args[0] == services.DeleteJobTemplateRequest()
# Establish that the response is the type that we expect.
assert response is None
+@pytest.mark.asyncio
+async def test_delete_job_template_async_from_dict():
+ await test_delete_job_template_async(request_type=dict)
+
+
def test_delete_job_template_field_headers():
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
@@ -2157,7 +2271,7 @@ def test_delete_job_template_field_headers():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
call.return_value = None
@@ -2186,7 +2300,7 @@ async def test_delete_job_template_field_headers_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(None)
@@ -2207,7 +2321,7 @@ def test_delete_job_template_flattened():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2243,7 +2357,7 @@ async def test_delete_job_template_flattened_async():
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
- type(client._client._transport.delete_job_template), "__call__"
+ type(client.transport.delete_job_template), "__call__"
) as call:
# Designate an appropriate return value for the call.
call.return_value = None
@@ -2311,7 +2425,7 @@ def test_transport_instance():
credentials=credentials.AnonymousCredentials(),
)
client = TranscoderServiceClient(transport=transport)
- assert client._transport is transport
+ assert client.transport is transport
def test_transport_get_channel():
@@ -2329,10 +2443,25 @@ def test_transport_get_channel():
assert channel
+@pytest.mark.parametrize(
+ "transport_class",
+ [
+ transports.TranscoderServiceGrpcTransport,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ ],
+)
+def test_transport_adc(transport_class):
+ # Test default credentials are used if not provided.
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport_class()
+ adc.assert_called_once()
+
+
def test_transport_grpc_default():
# A client should use the gRPC transport by default.
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials(),)
- assert isinstance(client._transport, transports.TranscoderServiceGrpcTransport,)
+ assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport,)
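(Illustrative sketch, not part of the diff: throughout this change the tests switch from the private ``client._transport`` attribute to the public ``client.transport`` property. Assuming the same imports as this test module, the property and the existing transport-injection path look roughly like this.)

from google.auth import credentials
from google.cloud.video.transcoder_v1beta1.services.transcoder_service import (
    TranscoderServiceClient,
    transports,
)

# By default the client builds a gRPC transport and exposes it as a property.
client = TranscoderServiceClient(credentials=credentials.AnonymousCredentials())
assert isinstance(client.transport, transports.TranscoderServiceGrpcTransport)

# An explicit transport instance can still be injected, as test_transport_instance
# above verifies; the property then returns that same object.
transport = transports.TranscoderServiceGrpcTransport(
    credentials=credentials.AnonymousCredentials(),
)
client = TranscoderServiceClient(transport=transport)
assert client.transport is transport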
def test_transcoder_service_base_transport_error():
@@ -2390,6 +2519,17 @@ def test_transcoder_service_base_transport_with_credentials_file():
)
+def test_transcoder_service_base_transport_with_adc():
+ # Test the default credentials are used if credentials and credentials_file are None.
+ with mock.patch.object(auth, "default") as adc, mock.patch(
+ "google.cloud.video.transcoder_v1beta1.services.transcoder_service.transports.TranscoderServiceTransport._prep_wrapped_messages"
+ ) as Transport:
+ Transport.return_value = None
+ adc.return_value = (credentials.AnonymousCredentials(), None)
+ transport = transports.TranscoderServiceTransport()
+ adc.assert_called_once()
+
+
def test_transcoder_service_auth_adc():
# If no credentials are provided, we should use ADC credentials.
with mock.patch.object(auth, "default") as adc:
@@ -2422,7 +2562,7 @@ def test_transcoder_service_host_no_port():
api_endpoint="transcoder.googleapis.com"
),
)
- assert client._transport._host == "transcoder.googleapis.com:443"
+ assert client.transport._host == "transcoder.googleapis.com:443"
def test_transcoder_service_host_with_port():
@@ -2432,185 +2572,119 @@ def test_transcoder_service_host_with_port():
api_endpoint="transcoder.googleapis.com:8000"
),
)
- assert client._transport._host == "transcoder.googleapis.com:8000"
+ assert client.transport._host == "transcoder.googleapis.com:8000"
def test_transcoder_service_grpc_transport_channel():
channel = grpc.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.TranscoderServiceGrpcTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
+ assert transport._ssl_channel_credentials is None
def test_transcoder_service_grpc_asyncio_transport_channel():
channel = aio.insecure_channel("http://localhost/")
- # Check that if channel is provided, mtls endpoint and client_cert_source
- # won't be used.
- callback = mock.MagicMock()
+ # Check that channel is used if provided.
transport = transports.TranscoderServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- channel=channel,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=callback,
+ host="squid.clam.whelk", channel=channel,
)
assert transport.grpc_channel == channel
assert transport._host == "squid.clam.whelk:443"
- assert not callback.called
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_transcoder_service_grpc_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.TranscoderServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
-
-
-@mock.patch("grpc.ssl_channel_credentials", autospec=True)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_transcoder_service_grpc_asyncio_transport_channel_mtls_with_client_cert_source(
- grpc_create_channel, grpc_ssl_channel_cred
-):
- # Check that if channel is None, but api_mtls_endpoint and client_cert_source
- # are provided, then a mTLS channel will be created.
- mock_cred = mock.Mock()
-
- mock_ssl_cred = mock.Mock()
- grpc_ssl_channel_cred.return_value = mock_ssl_cred
-
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- transport = transports.TranscoderServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint="mtls.squid.clam.whelk",
- client_cert_source=client_cert_source_callback,
- )
- grpc_ssl_channel_cred.assert_called_once_with(
- certificate_chain=b"cert bytes", private_key=b"key bytes"
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials is None
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.TranscoderServiceGrpcTransport,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers.create_channel", autospec=True)
-def test_transcoder_service_grpc_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
+def test_transcoder_service_transport_channel_mtls_with_client_cert_source(
+ transport_class,
):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
- mock_ssl_cred = mock.Mock()
- with mock.patch.multiple(
- "google.auth.transport.grpc.SslCredentials",
- __init__=mock.Mock(return_value=None),
- ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
- ):
- mock_cred = mock.Mock()
- transport = transports.TranscoderServiceGrpcTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch(
+ "grpc.ssl_channel_credentials", autospec=True
+ ) as grpc_ssl_channel_cred:
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_ssl_cred = mock.Mock()
+ grpc_ssl_channel_cred.return_value = mock_ssl_cred
+
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+
+ cred = credentials.AnonymousCredentials()
+ with pytest.warns(DeprecationWarning):
+ with mock.patch.object(auth, "default") as adc:
+ adc.return_value = (cred, None)
+ transport = transport_class(
+ host="squid.clam.whelk",
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=client_cert_source_callback,
+ )
+ adc.assert_called_once()
+
+ grpc_ssl_channel_cred.assert_called_once_with(
+ certificate_chain=b"cert bytes", private_key=b"key bytes"
+ )
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
+ assert transport._ssl_channel_credentials == mock_ssl_cred
@pytest.mark.parametrize(
- "api_mtls_endpoint", ["mtls.squid.clam.whelk", "mtls.squid.clam.whelk:443"]
+ "transport_class",
+ [
+ transports.TranscoderServiceGrpcTransport,
+ transports.TranscoderServiceGrpcAsyncIOTransport,
+ ],
)
-@mock.patch("google.api_core.grpc_helpers_async.create_channel", autospec=True)
-def test_transcoder_service_grpc_asyncio_transport_channel_mtls_with_adc(
- grpc_create_channel, api_mtls_endpoint
-):
- # Check that if channel and client_cert_source are None, but api_mtls_endpoint
- # is provided, then a mTLS channel will be created with SSL ADC.
- mock_grpc_channel = mock.Mock()
- grpc_create_channel.return_value = mock_grpc_channel
-
- # Mock google.auth.transport.grpc.SslCredentials class.
+def test_transcoder_service_transport_channel_mtls_with_adc(transport_class):
mock_ssl_cred = mock.Mock()
with mock.patch.multiple(
"google.auth.transport.grpc.SslCredentials",
__init__=mock.Mock(return_value=None),
ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
):
- mock_cred = mock.Mock()
- transport = transports.TranscoderServiceGrpcAsyncIOTransport(
- host="squid.clam.whelk",
- credentials=mock_cred,
- api_mtls_endpoint=api_mtls_endpoint,
- client_cert_source=None,
- )
- grpc_create_channel.assert_called_once_with(
- "mtls.squid.clam.whelk:443",
- credentials=mock_cred,
- credentials_file=None,
- scopes=("https://www.googleapis.com/auth/cloud-platform",),
- ssl_credentials=mock_ssl_cred,
- quota_project_id=None,
- )
- assert transport.grpc_channel == mock_grpc_channel
+ with mock.patch.object(
+ transport_class, "create_channel", autospec=True
+ ) as grpc_create_channel:
+ mock_grpc_channel = mock.Mock()
+ grpc_create_channel.return_value = mock_grpc_channel
+ mock_cred = mock.Mock()
+
+ with pytest.warns(DeprecationWarning):
+ transport = transport_class(
+ host="squid.clam.whelk",
+ credentials=mock_cred,
+ api_mtls_endpoint="mtls.squid.clam.whelk",
+ client_cert_source=None,
+ )
+
+ grpc_create_channel.assert_called_once_with(
+ "mtls.squid.clam.whelk:443",
+ credentials=mock_cred,
+ credentials_file=None,
+ scopes=("https://www.googleapis.com/auth/cloud-platform",),
+ ssl_credentials=mock_ssl_cred,
+ quota_project_id=None,
+ )
+ assert transport.grpc_channel == mock_grpc_channel
def test_job_path():
@@ -2639,9 +2713,9 @@ def test_parse_job_path():
def test_job_template_path():
- project = "squid"
- location = "clam"
- job_template = "whelk"
+ project = "cuttlefish"
+ location = "mussel"
+ job_template = "winkle"
expected = "projects/{project}/locations/{location}/jobTemplates/{job_template}".format(
project=project, location=location, job_template=job_template,
@@ -2652,9 +2726,9 @@ def test_job_template_path():
def test_parse_job_template_path():
expected = {
- "project": "octopus",
- "location": "oyster",
- "job_template": "nudibranch",
+ "project": "nautilus",
+ "location": "scallop",
+ "job_template": "abalone",
}
path = TranscoderServiceClient.job_template_path(**expected)
@@ -2663,6 +2737,107 @@ def test_parse_job_template_path():
assert expected == actual
+def test_common_billing_account_path():
+ billing_account = "squid"
+
+ expected = "billingAccounts/{billing_account}".format(
+ billing_account=billing_account,
+ )
+ actual = TranscoderServiceClient.common_billing_account_path(billing_account)
+ assert expected == actual
+
+
+def test_parse_common_billing_account_path():
+ expected = {
+ "billing_account": "clam",
+ }
+ path = TranscoderServiceClient.common_billing_account_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_billing_account_path(path)
+ assert expected == actual
+
+
+def test_common_folder_path():
+ folder = "whelk"
+
+ expected = "folders/{folder}".format(folder=folder,)
+ actual = TranscoderServiceClient.common_folder_path(folder)
+ assert expected == actual
+
+
+def test_parse_common_folder_path():
+ expected = {
+ "folder": "octopus",
+ }
+ path = TranscoderServiceClient.common_folder_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_folder_path(path)
+ assert expected == actual
+
+
+def test_common_organization_path():
+ organization = "oyster"
+
+ expected = "organizations/{organization}".format(organization=organization,)
+ actual = TranscoderServiceClient.common_organization_path(organization)
+ assert expected == actual
+
+
+def test_parse_common_organization_path():
+ expected = {
+ "organization": "nudibranch",
+ }
+ path = TranscoderServiceClient.common_organization_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_organization_path(path)
+ assert expected == actual
+
+
+def test_common_project_path():
+ project = "cuttlefish"
+
+ expected = "projects/{project}".format(project=project,)
+ actual = TranscoderServiceClient.common_project_path(project)
+ assert expected == actual
+
+
+def test_parse_common_project_path():
+ expected = {
+ "project": "mussel",
+ }
+ path = TranscoderServiceClient.common_project_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_project_path(path)
+ assert expected == actual
+
+
+def test_common_location_path():
+ project = "winkle"
+ location = "nautilus"
+
+ expected = "projects/{project}/locations/{location}".format(
+ project=project, location=location,
+ )
+ actual = TranscoderServiceClient.common_location_path(project, location)
+ assert expected == actual
+
+
+def test_parse_common_location_path():
+ expected = {
+ "project": "scallop",
+ "location": "abalone",
+ }
+ path = TranscoderServiceClient.common_location_path(**expected)
+
+ # Check that the path construction is reversible.
+ actual = TranscoderServiceClient.parse_common_location_path(path)
+ assert expected == actual
+
+
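(Illustrative sketch, not part of the diff: the ``common_*_path`` helpers exercised by the new tests above are plain classmethods that format and parse resource names, so they need neither credentials nor network access. Placeholder values shown.)

from google.cloud.video.transcoder_v1beta1.services.transcoder_service import (
    TranscoderServiceClient,
)

path = TranscoderServiceClient.common_location_path("my-project", "us-central1")
assert path == "projects/my-project/locations/us-central1"
assert TranscoderServiceClient.parse_common_location_path(path) == {
    "project": "my-project",
    "location": "us-central1",
}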
def test_client_withDEFAULT_CLIENT_INFO():
client_info = gapic_v1.client_info.ClientInfo()