From 473b6935b618dbe07997165647f96cba8fae9f21 Mon Sep 17 00:00:00 2001 From: Nick Cook Date: Fri, 14 Jan 2022 16:41:43 -0800 Subject: [PATCH 1/2] feat: move samples from GoogleCloudPlatform/python-docs-samples --- README.rst | 2 + samples/snippets/README.md | 40 ++ samples/snippets/create_job_from_ad_hoc.py | 123 ++++++ samples/snippets/create_job_from_preset.py | 88 ++++ samples/snippets/create_job_from_template.py | 88 ++++ samples/snippets/create_job_template.py | 115 +++++ .../create_job_with_animated_overlay.py | 161 +++++++ .../create_job_with_concatenated_inputs.py | 203 +++++++++ ...te_job_with_periodic_images_spritesheet.py | 137 ++++++ ..._job_with_set_number_images_spritesheet.py | 136 ++++++ .../create_job_with_static_overlay.py | 149 +++++++ samples/snippets/delete_job.py | 56 +++ samples/snippets/delete_job_template.py | 58 +++ samples/snippets/get_job.py | 57 +++ samples/snippets/get_job_state.py | 58 +++ samples/snippets/get_job_template.py | 58 +++ samples/snippets/job_template_test.py | 60 +++ samples/snippets/job_test.py | 416 ++++++++++++++++++ samples/snippets/list_job_templates.py | 59 +++ samples/snippets/list_jobs.py | 57 +++ samples/snippets/requirements-test.txt | 3 + samples/snippets/requirements.txt | 3 + 22 files changed, 2127 insertions(+) create mode 100644 samples/snippets/README.md create mode 100644 samples/snippets/create_job_from_ad_hoc.py create mode 100644 samples/snippets/create_job_from_preset.py create mode 100644 samples/snippets/create_job_from_template.py create mode 100644 samples/snippets/create_job_template.py create mode 100644 samples/snippets/create_job_with_animated_overlay.py create mode 100644 samples/snippets/create_job_with_concatenated_inputs.py create mode 100644 samples/snippets/create_job_with_periodic_images_spritesheet.py create mode 100644 samples/snippets/create_job_with_set_number_images_spritesheet.py create mode 100644 samples/snippets/create_job_with_static_overlay.py create mode 100644 
samples/snippets/delete_job.py create mode 100644 samples/snippets/delete_job_template.py create mode 100644 samples/snippets/get_job.py create mode 100644 samples/snippets/get_job_state.py create mode 100644 samples/snippets/get_job_template.py create mode 100644 samples/snippets/job_template_test.py create mode 100644 samples/snippets/job_test.py create mode 100644 samples/snippets/list_job_templates.py create mode 100644 samples/snippets/list_jobs.py create mode 100644 samples/snippets/requirements-test.txt create mode 100644 samples/snippets/requirements.txt diff --git a/README.rst b/README.rst index e613c07..889cba7 100644 --- a/README.rst +++ b/README.rst @@ -68,9 +68,11 @@ Windows \Scripts\activate \Scripts\pip.exe install google-cloud-video-transcoder + Next Steps ~~~~~~~~~~ +- See the [Samples](./samples/snippets/README.md). - Read the `Client Library Documentation`_ for Cloud Transcoder API API to see other available methods on the client. - Read the `Transcoder API Product documentation`_ to learn diff --git a/samples/snippets/README.md b/samples/snippets/README.md new file mode 100644 index 0000000..ce4ff95 --- /dev/null +++ b/samples/snippets/README.md @@ -0,0 +1,40 @@ +# Transcoder API Python Samples + +This directory contains samples for the Transcoder API. Use this API to transcode videos into a variety of formats. The Transcoder API benefits broadcasters, production companies, businesses, and individuals looking to transform their video content for use across a variety of user devices. For more information, see the [Transcoder API documentation](https://cloud.google.com/transcoder/). + +## Setup + +To run the samples, you need to first follow the steps in [Before you begin](https://cloud.google.com/transcoder/docs/how-to/before-you-begin). + +For more information on authentication, refer to the +[Authentication Getting Started Guide](https://cloud.google.com/docs/authentication/getting-started). + +## Install Dependencies + +1. 
Clone python-docs-samples and change directory to the sample directory you want to use. + + $ git clone https://github.com/googleapis/python-video-transcoder.git + +1. Install [pip](https://pip.pypa.io/) and [virtualenv](https://virtualenv.pypa.io/) if you do not already have them. You may want to refer to the [Python Development Environment Setup Guide](https://cloud.google.com/python/setup) for Google Cloud Platform for instructions. + +1. Create a virtualenv. Samples are compatible with Python 3.6+. + + $ virtualenv env + $ source env/bin/activate + +1. Install the dependencies needed to run the samples. + + $ pip install -r requirements.txt + +## The client library + +This sample uses the [Google Cloud Client Library for Python](https://googlecloudplatform.github.io/google-cloud-python/). +You can read the documentation for more details on API usage and use GitHub +to [browse the source](https://github.com/GoogleCloudPlatform/google-cloud-python) and [report issues](https://github.com/GoogleCloudPlatform/google-cloud-python/issues). + +## Testing + +Make sure to enable the Transcoder API on the test project. Set the following environment variables: + +* `GOOGLE_CLOUD_PROJECT` +* `GOOGLE_CLOUD_PROJECT_NUMBER` diff --git a/samples/snippets/create_job_from_ad_hoc.py b/samples/snippets/create_job_from_ad_hoc.py new file mode 100644 index 0000000..7fc6dce --- /dev/null +++ b/samples/snippets/create_job_from_ad_hoc.py @@ -0,0 +1,123 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a supplied job config. + +Example usage: + python create_job_from_ad_hoc.py --project_id --location --input_uri --output_uri +""" + +# [START transcoder_create_job_from_ad_hoc] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): + """Creates a job based on an ad-hoc job configuration. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="video-stream1", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=720, + width_pixels=1280, + bitrate_bps=2500000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + 
transcoder_v1.types.MuxStream( + key="hd", + container="mp4", + elementary_streams=["video-stream1", "audio-stream0"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_ad_hoc] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_from_ad_hoc( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_from_preset.py b/samples/snippets/create_job_from_preset.py new file mode 100644 index 0000000..67c1672 --- /dev/null +++ b/samples/snippets/create_job_from_preset.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a job preset. 
+ +Example usage: + python create_job_from_preset.py --project_id --location --input_uri --output_uri [--preset ] +""" + +# [START transcoder_create_job_from_preset] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_preset(project_id, location, input_uri, output_uri, preset): + """Creates a job based on a job preset. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket. + preset: The preset template (for example, 'preset/web-hd').""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.template_id = preset + + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_preset] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. 
Must end in '/'.", + required=True, + ) + parser.add_argument( + "--preset", + help="The preset template (for example, 'preset/web-hd').", + default="preset/web-hd", + ) + args = parser.parse_args() + create_job_from_preset( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + args.preset, + ) diff --git a/samples/snippets/create_job_from_template.py b/samples/snippets/create_job_from_template.py new file mode 100644 index 0000000..685c3f6 --- /dev/null +++ b/samples/snippets/create_job_from_template.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on a job template. + +Example usage: + python create_job_from_template.py --project_id --location --input_uri --output_uri --template_id +""" + +# [START transcoder_create_job_from_template] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_from_template(project_id, location, input_uri, output_uri, template_id): + """Creates a job based on a job template. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket. 
+ template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.template_id = template_id + + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_from_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + parser.add_argument( + "--template_id", + help="The job template ID. The template must be located in the same location as the job.", + required=True, + ) + args = parser.parse_args() + create_job_from_template( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + args.template_id, + ) diff --git a/samples/snippets/create_job_template.py b/samples/snippets/create_job_template.py new file mode 100644 index 0000000..95ed05d --- /dev/null +++ b/samples/snippets/create_job_template.py @@ -0,0 +1,115 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job template. + +Example usage: + python create_job_template.py --project_id [--location ] [--template_id ] +""" + +# [START transcoder_create_job_template] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def create_job_template(project_id, location, template_id): + """Creates a job template. + + Args: + project_id: The GCP project ID. + location: The location to store this template in. + template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + + job_template = transcoder_v1.types.JobTemplate() + job_template.name = ( + f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + ) + job_template.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="video-stream1", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=720, + width_pixels=1280, + bitrate_bps=2500000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + 
audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + transcoder_v1.types.MuxStream( + key="hd", + container="mp4", + elementary_streams=["video-stream1", "audio-stream0"], + ), + ], + ) + + response = client.create_job_template( + parent=parent, job_template=job_template, job_template_id=template_id + ) + print(f"Job template: {response.name}") + return response + + +# [END transcoder_create_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to store this template in.", + default="us-central1", + ) + parser.add_argument( + "--template_id", help="The job template ID.", default="my-job-template" + ) + args = parser.parse_args() + create_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/create_job_with_animated_overlay.py b/samples/snippets/create_job_with_animated_overlay.py new file mode 100644 index 0000000..a3f3cbb --- /dev/null +++ b/samples/snippets/create_job_with_animated_overlay.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Google Cloud Transcoder sample for creating a job based on a supplied job config that includes an animated overlay. + +Example usage: + python create_job_with_animated_overlay.py --project_id --location --input_uri --overlay_image_uri --output_uri +""" + +# [START transcoder_create_job_with_animated_overlay] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_animated_overlay( + project_id, location, input_uri, overlay_image_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that includes an animated image overlay. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + overlay_image_uri: Uri of the JPEG image for the overlay in the Cloud Storage bucket. Must be a JPEG. 
+ output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + overlays=[ + transcoder_v1.types.Overlay( + image=transcoder_v1.types.Overlay.Image( + uri=overlay_image_uri, + resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0, + y=0, + ), + alpha=1, + ), + animations=[ + transcoder_v1.types.Overlay.Animation( + animation_fade=transcoder_v1.types.Overlay.AnimationFade( + fade_type=transcoder_v1.types.Overlay.FadeType.FADE_IN, + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0.5, + y=0.5, + ), + start_time_offset=duration.Duration( + seconds=5, + ), + end_time_offset=duration.Duration( + seconds=10, + ), + ), + ), + transcoder_v1.types.Overlay.Animation( + animation_fade=transcoder_v1.types.Overlay.AnimationFade( + fade_type=transcoder_v1.types.Overlay.FadeType.FADE_OUT, + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0.5, + y=0.5, + ), + start_time_offset=duration.Duration( + seconds=12, + ), + end_time_offset=duration.Duration( + seconds=15, + ), + ), + ), + ], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END 
transcoder_create_job_with_animated_overlay] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--overlay_image_uri", + help="Uri of the overlay JPEG image in the Cloud Storage bucket. Must be a JPEG.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_animated_overlay( + args.project_id, + args.location, + args.input_uri, + args.overlay_image_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_concatenated_inputs.py b/samples/snippets/create_job_with_concatenated_inputs.py new file mode 100644 index 0000000..4a64f88 --- /dev/null +++ b/samples/snippets/create_job_with_concatenated_inputs.py @@ -0,0 +1,203 @@ +#!/usr/bin/env python + +# Copyright 2022 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job based on concatenating two input videos. 
+ +Example usage: + python create_job_with_concatenated_inputs.py --project_id --location \ + --input1_uri --start_time_input1 --end_time_input1 \ + --input2_uri --start_time_input2 --end_time_input2 \ + --output_uri +""" + +# [START transcoder_create_job_with_concatenated_inputs] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_concatenated_inputs( + project_id, + location, + input1_uri, + start_time_input1, + end_time_input1, + input2_uri, + start_time_input2, + end_time_input2, + output_uri, +): + """Creates a job based on an ad-hoc job configuration that concatenates two input videos. + + Args: + project_id (str): The GCP project ID. + location (str): The location to start the job in. + input1_uri (str): Uri of the first video in the Cloud Storage bucket. + start_time_input1 (str): Start time, in fractional seconds ending in 's' + (e.g., '0s'), relative to the first input video timeline. + end_time_input1 (str): End time, in fractional seconds ending in 's' + (e.g., '8.1s'), relative to the first input video timeline. + input2_uri (str): Uri of the second video in the Cloud Storage bucket. + start_time_input2 (str): Start time, in fractional seconds ending in 's' + (e.g., '3.5s'), relative to the second input video timeline. + end_time_input2 (str): End time, in fractional seconds ending in 's' + (e.g., '15s'), relative to the second input video timeline. 
+ output_uri (str): Uri of the video output folder in the Cloud Storage + bucket.""" + + s1 = duration.Duration() + s1.FromJsonString(start_time_input1) + e1 = duration.Duration() + e1.FromJsonString(end_time_input1) + + s2 = duration.Duration() + s2.FromJsonString(start_time_input2) + e2 = duration.Duration() + e2.FromJsonString(end_time_input2) + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + inputs=[ + transcoder_v1.types.Input( + key="input1", + uri=input1_uri, + ), + transcoder_v1.types.Input( + key="input2", + uri=input2_uri, + ), + ], + edit_list=[ + transcoder_v1.types.EditAtom( + key="atom1", + inputs=["input1"], + start_time_offset=s1, + end_time_offset=e1, + ), + transcoder_v1.types.EditAtom( + key="atom2", + inputs=["input2"], + start_time_offset=s2, + end_time_offset=e2, + ), + ], + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_concatenated_inputs] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + 
parser.add_argument( + "--input1_uri", + help="Uri of the first video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--start_time_input1", + help="Start time, in fractional seconds ending in 's' (e.g., '1.1s'), " + + "relative to the first input video timeline. Use this field to trim " + + "content from the beginning of the first video.", + required=True, + ) + parser.add_argument( + "--end_time_input1", + help="End time, in fractional seconds ending in 's' (e.g., '9.5s'), " + + "relative to the first input video timeline. Use this field to trim " + + "content from the end of the first video.", + required=True, + ) + parser.add_argument( + "--input2_uri", + help="Uri of the second video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--start_time_input2", + help="Start time, in fractional seconds ending in 's' (e.g., '1.1s'), " + + "relative to the second input video timeline. Use this field to trim " + + "content from the beginning of the second video.", + required=True, + ) + parser.add_argument( + "--end_time_input2", + help="End time, in fractional seconds ending in 's' (e.g., '9.5s'), " + + "relative to the second input video timeline. Use this field to trim " + + "content from the end of the second video.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. 
" + + "Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_concatenated_inputs( + args.project_id, + args.location, + args.input1_uri, + args.start_time_input1, + args.end_time_input1, + args.input2_uri, + args.start_time_input2, + args.end_time_input2, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_periodic_images_spritesheet.py b/samples/snippets/create_job_with_periodic_images_spritesheet.py new file mode 100644 index 0000000..a3f7af5 --- /dev/null +++ b/samples/snippets/create_job_with_periodic_images_spritesheet.py @@ -0,0 +1,137 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for creating a job that generates two spritesheets from the input video. Each spritesheet contains images that are captured periodically. + +Example usage: + python create_job_with_periodic_images_spritesheet.py --project_id --location --input_uri --output_uri +""" + +# [START transcoder_create_job_with_periodic_images_spritesheet] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_periodic_images_spritesheet( + project_id, location, input_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that generates two spritesheets. 
+ + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc. + # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1beta1/JobConfig. + elementary_streams=[ + # This section defines the output video stream. + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + # This section defines the output audio stream. + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + # This section multiplexes the output audio and video together into a container. + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + # Generate two sprite sheets from the input video into the GCS bucket. For more information, see + # https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_image_periodically. + sprite_sheets=[ + # Generate a sprite sheet with 64x32px images. An image is taken every 7 seconds from the video. 
+ transcoder_v1.types.SpriteSheet( + file_prefix="small-sprite-sheet", + sprite_width_pixels=64, + sprite_height_pixels=32, + interval=duration.Duration( + seconds=7, + ), + ), + # Generate a sprite sheet with 128x72px images. An image is taken every 7 seconds from the video. + transcoder_v1.types.SpriteSheet( + file_prefix="large-sprite-sheet", + sprite_width_pixels=128, + sprite_height_pixels=72, + interval=duration.Duration( + seconds=7, + ), + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_periodic_images_spritesheet] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_periodic_images_spritesheet( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_set_number_images_spritesheet.py b/samples/snippets/create_job_with_set_number_images_spritesheet.py new file mode 100644 index 0000000..b21b249 --- /dev/null +++ b/samples/snippets/create_job_with_set_number_images_spritesheet.py @@ -0,0 +1,136 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Transcoder sample for creating a job that generates two spritesheets from the input video. Each spritesheet contains a set number of images.
+
+Example usage:
+    python create_job_with_set_number_images_spritesheet.py --project_id <project-id> --location <location> --input_uri <uri> --output_uri <uri>
+"""
+
+# [START transcoder_create_job_with_set_number_images_spritesheet]
+
+import argparse
+
+from google.cloud.video import transcoder_v1
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+    TranscoderServiceClient,
+)
+
+
+def create_job_with_set_number_images_spritesheet(
+    project_id, location, input_uri, output_uri
+):
+    """Creates a job based on an ad-hoc job configuration that generates two spritesheets.
+
+    Args:
+        project_id: The GCP project ID.
+        location: The location to start the job in.
+        input_uri: Uri of the video in the Cloud Storage bucket.
+        output_uri: Uri of the video output folder in the Cloud Storage bucket."""
+
+    client = TranscoderServiceClient()
+
+    parent = f"projects/{project_id}/locations/{location}"
+    job = transcoder_v1.types.Job()
+    job.input_uri = input_uri
+    job.output_uri = output_uri
+    job.config = transcoder_v1.types.JobConfig(
+        # Create an ad-hoc job. For more information, see https://cloud.google.com/transcoder/docs/how-to/jobs#create_jobs_ad_hoc.
+        # See all options for the job config at https://cloud.google.com/transcoder/docs/reference/rest/v1/JobConfig.
+        elementary_streams=[
+            # This section defines the output video stream.
+ transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + # This section defines the output audio stream. + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + # This section multiplexes the output audio and video together into a container. + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + # Generate two sprite sheets from the input video into the GCS bucket. For more information, see + # https://cloud.google.com/transcoder/docs/how-to/generate-spritesheet#generate_set_number_of_images. + sprite_sheets=[ + # Generate a 10x10 sprite sheet with 64x32px images. + transcoder_v1.types.SpriteSheet( + file_prefix="small-sprite-sheet", + sprite_width_pixels=64, + sprite_height_pixels=32, + column_count=10, + row_count=10, + total_count=100, + ), + # Generate a 10x10 sprite sheet with 128x72px images. 
+ transcoder_v1.types.SpriteSheet( + file_prefix="large-sprite-sheet", + sprite_width_pixels=128, + sprite_height_pixels=72, + column_count=10, + row_count=10, + total_count=100, + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_set_number_images_spritesheet] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_set_number_images_spritesheet( + args.project_id, + args.location, + args.input_uri, + args.output_uri, + ) diff --git a/samples/snippets/create_job_with_static_overlay.py b/samples/snippets/create_job_with_static_overlay.py new file mode 100644 index 0000000..37ce26b --- /dev/null +++ b/samples/snippets/create_job_with_static_overlay.py @@ -0,0 +1,149 @@ +#!/usr/bin/env python + +# Copyright 2021 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Google Cloud Transcoder sample for creating a job based on a supplied job config that includes a static overlay. + +Example usage: + python create_job_with_static_overlay.py --project_id --location --input_uri --overlay_image_uri --output_uri +""" + +# [START transcoder_create_job_with_static_overlay] + +import argparse + +from google.cloud.video import transcoder_v1 +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) +from google.protobuf import duration_pb2 as duration + + +def create_job_with_static_overlay( + project_id, location, input_uri, overlay_image_uri, output_uri +): + """Creates a job based on an ad-hoc job configuration that includes a static image overlay. + + Args: + project_id: The GCP project ID. + location: The location to start the job in. + input_uri: Uri of the video in the Cloud Storage bucket. + overlay_image_uri: Uri of the JPEG image for the overlay in the Cloud Storage bucket. Must be a JPEG. + output_uri: Uri of the video output folder in the Cloud Storage bucket.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + job = transcoder_v1.types.Job() + job.input_uri = input_uri + job.output_uri = output_uri + job.config = transcoder_v1.types.JobConfig( + elementary_streams=[ + transcoder_v1.types.ElementaryStream( + key="video-stream0", + video_stream=transcoder_v1.types.VideoStream( + h264=transcoder_v1.types.VideoStream.H264CodecSettings( + height_pixels=360, + width_pixels=640, + bitrate_bps=550000, + frame_rate=60, + ), + ), + ), + transcoder_v1.types.ElementaryStream( + key="audio-stream0", + audio_stream=transcoder_v1.types.AudioStream( + codec="aac", bitrate_bps=64000 + ), + ), + ], + mux_streams=[ + transcoder_v1.types.MuxStream( + key="sd", + container="mp4", + elementary_streams=["video-stream0", "audio-stream0"], + ), + ], + overlays=[ + transcoder_v1.types.Overlay( + image=transcoder_v1.types.Overlay.Image( + 
uri=overlay_image_uri, + resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=1, + y=0.5, + ), + alpha=1, + ), + animations=[ + transcoder_v1.types.Overlay.Animation( + animation_static=transcoder_v1.types.Overlay.AnimationStatic( + xy=transcoder_v1.types.Overlay.NormalizedCoordinate( + x=0, + y=0, + ), + start_time_offset=duration.Duration( + seconds=0, + ), + ), + ), + transcoder_v1.types.Overlay.Animation( + animation_end=transcoder_v1.types.Overlay.AnimationEnd( + start_time_offset=duration.Duration( + seconds=10, + ), + ), + ), + ], + ), + ], + ) + response = client.create_job(parent=parent, job=job) + print(f"Job: {response.name}") + return response + + +# [END transcoder_create_job_with_static_overlay] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", + help="The location to start this job in.", + default="us-central1", + ) + parser.add_argument( + "--input_uri", + help="Uri of the video in the Cloud Storage bucket.", + required=True, + ) + parser.add_argument( + "--overlay_image_uri", + help="Uri of the overlay JPEG image in the Cloud Storage bucket. Must be a JPEG.", + required=True, + ) + parser.add_argument( + "--output_uri", + help="Uri of the video output folder in the Cloud Storage bucket. Must end in '/'.", + required=True, + ) + args = parser.parse_args() + create_job_with_static_overlay( + args.project_id, + args.location, + args.input_uri, + args.overlay_image_uri, + args.output_uri, + ) diff --git a/samples/snippets/delete_job.py b/samples/snippets/delete_job.py new file mode 100644 index 0000000..5f139f9 --- /dev/null +++ b/samples/snippets/delete_job.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Google Cloud Transcoder sample for deleting a job.
+
+Example usage:
+    python delete_job.py --project_id <project-id> --location <location> --job_id <job-id>
+"""
+
+# [START transcoder_delete_job]
+
+import argparse
+
+from google.cloud.video.transcoder_v1.services.transcoder_service import (
+    TranscoderServiceClient,
+)
+
+
+def delete_job(project_id, location, job_id):
+    """Deletes a job.
+
+    Args:
+        project_id: The GCP project ID.
+        location: The location this job is in.
+        job_id: The job ID."""
+
+    client = TranscoderServiceClient()
+
+    name = f"projects/{project_id}/locations/{location}/jobs/{job_id}"
+    response = client.delete_job(name=name)
+    print("Deleted job")
+    return response
+
+
+# [END transcoder_delete_job]
+
+if __name__ == "__main__":
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--project_id", help="Your Cloud project ID.", required=True)
+    parser.add_argument("--location", help="The location of the job.", required=True)
+    parser.add_argument("--job_id", help="The job ID.", required=True)
+    args = parser.parse_args()
+    delete_job(args.project_id, args.location, args.job_id)
diff --git a/samples/snippets/delete_job_template.py b/samples/snippets/delete_job_template.py
new file mode 100644
index 0000000..ccce70f
--- /dev/null
+++ b/samples/snippets/delete_job_template.py
@@ -0,0 +1,58 @@
+#!/usr/bin/env python
+
+# Copyright 2020 Google Inc. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for deleting a job template. + +Example usage: + python delete_job_template.py --project_id --location --template_id +""" + +# [START transcoder_delete_job_template] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def delete_job_template(project_id, location, template_id): + """Deletes a job template. + + Args: + project_id: The GCP project ID. + location: The location of the template. + template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + response = client.delete_job_template(name=name) + print("Deleted job template") + return response + + +# [END transcoder_delete_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the template.", required=True + ) + parser.add_argument("--template_id", help="The job template ID.", required=True) + args = parser.parse_args() + delete_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/get_job.py b/samples/snippets/get_job.py new file mode 100644 index 0000000..ec5d7f1 --- /dev/null +++ b/samples/snippets/get_job.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting the details for a job. + +Example usage: + python get_job.py --project_id --location --job_id +""" + +# [START transcoder_get_job] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job(project_id, location, job_id): + """Gets a job. + + Args: + project_id: The GCP project ID. + location: The location this job is in. + job_id: The job ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobs/{job_id}" + response = client.get_job(name=name) + print(f"Job: {response.name}") + return response + + +# [END transcoder_get_job] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the job.", required=True) + parser.add_argument("--job_id", help="The job ID.", required=True) + args = parser.parse_args() + get_job(args.project_id, args.location, args.job_id) diff --git a/samples/snippets/get_job_state.py b/samples/snippets/get_job_state.py new file mode 100644 index 0000000..6b73acf --- /dev/null +++ b/samples/snippets/get_job_state.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting the state for a job. + +Example usage: + python get_job_state.py --project_id --location --job_id +""" + +# [START transcoder_get_job_state] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job_state(project_id, location, job_id): + """Gets a job's state. + + Args: + project_id: The GCP project ID. + location: The location this job is in. + job_id: The job ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobs/{job_id}" + response = client.get_job(name=name) + + print(f"Job state: {str(response.state)}") + return response + + +# [END transcoder_get_job_state] + + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the job.", required=True) + parser.add_argument("--job_id", help="The job ID.", required=True) + args = parser.parse_args() + get_job_state(args.project_id, args.location, args.job_id) diff --git a/samples/snippets/get_job_template.py b/samples/snippets/get_job_template.py new file mode 100644 index 0000000..4d6ccf7 --- /dev/null +++ b/samples/snippets/get_job_template.py @@ -0,0 +1,58 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for getting a job template. + +Example usage: + python get_job_template.py --project_id --location --template_id +""" + +# [START transcoder_get_job_template] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def get_job_template(project_id, location, template_id): + """Gets a job template. + + Args: + project_id: The GCP project ID. + location: The location of the template. 
+ template_id: The user-defined template ID.""" + + client = TranscoderServiceClient() + + name = f"projects/{project_id}/locations/{location}/jobTemplates/{template_id}" + response = client.get_job_template(name=name) + print(f"Job template: {response.name}") + return response + + +# [END transcoder_get_job_template] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the template.", required=True + ) + parser.add_argument("--template_id", help="The job template ID.", required=True) + args = parser.parse_args() + get_job_template(args.project_id, args.location, args.template_id) diff --git a/samples/snippets/job_template_test.py b/samples/snippets/job_template_test.py new file mode 100644 index 0000000..259595a --- /dev/null +++ b/samples/snippets/job_template_test.py @@ -0,0 +1,60 @@ +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import os +import uuid + +from google.api_core.exceptions import NotFound + +import create_job_template +import delete_job_template +import get_job_template +import list_job_templates + +location = "us-central1" +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +template_id = f"my-python-test-template-{uuid.uuid4()}" + + +def test_template_operations(capsys): + + # Enable the following API on the test project: + # * Transcoder API + + job_template_name = ( + f"projects/{project_number}/locations/{location}/jobTemplates/{template_id}" + ) + + try: + delete_job_template.delete_job_template(project_id, location, template_id) + except NotFound as e: + print(f"Ignoring NotFound, details: {e}") + out, _ = capsys.readouterr() + + create_job_template.create_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + get_job_template.get_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + list_job_templates.list_job_templates(project_id, location) + out, _ = capsys.readouterr() + assert job_template_name in out + + delete_job_template.delete_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert "Deleted job template" in out diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py new file mode 100644 index 0000000..bf54918 --- /dev/null +++ b/samples/snippets/job_test.py @@ -0,0 +1,416 @@ +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import time +import uuid + +import backoff +from google.cloud import storage +from googleapiclient.errors import HttpError +import pytest + +import create_job_from_ad_hoc +import create_job_from_preset +import create_job_from_template +import create_job_template +import create_job_with_animated_overlay +import create_job_with_concatenated_inputs +import create_job_with_periodic_images_spritesheet +import create_job_with_set_number_images_spritesheet +import create_job_with_static_overlay +import delete_job +import delete_job_template +import get_job +import get_job_state +import list_jobs + +location = "us-central1" +project_id = os.environ["GOOGLE_CLOUD_PROJECT"] +project_number = os.environ["GOOGLE_CLOUD_PROJECT_NUMBER"] +template_id = f"my-python-test-template-{uuid.uuid4()}" + +input_bucket_name = "cloud-samples-data/media/" +output_bucket_name = f"python-samples-transcoder-{uuid.uuid4()}" +test_video_file_name = "ChromeCast.mp4" +test_overlay_image_file_name = "overlay.jpg" +test_concat1_file_name = "ForBiggerEscapes.mp4" +test_concat2_file_name = "ForBiggerJoyrides.mp4" + +input_uri = f"gs://{input_bucket_name}{test_video_file_name}" +overlay_image_uri = f"gs://{input_bucket_name}{test_overlay_image_file_name}" +concat1_uri = f"gs://{input_bucket_name}{test_concat1_file_name}" +concat2_uri = f"gs://{input_bucket_name}{test_concat2_file_name}" +output_uri_for_preset = f"gs://{output_bucket_name}/test-output-preset/" +output_uri_for_template = f"gs://{output_bucket_name}/test-output-template/" +output_uri_for_adhoc = 
f"gs://{output_bucket_name}/test-output-adhoc/" +output_uri_for_static_overlay = f"gs://{output_bucket_name}/test-output-static-overlay/" +output_uri_for_animated_overlay = ( + f"gs://{output_bucket_name}/test-output-animated-overlay/" +) +small_spritesheet_file_prefix = "small-sprite-sheet" +large_spritesheet_file_prefix = "large-sprite-sheet" +spritesheet_file_suffix = "0000000000.jpeg" + +output_dir_for_set_number_spritesheet = "test-output-set-number-spritesheet/" +output_uri_for_set_number_spritesheet = ( + f"gs://{output_bucket_name}/{output_dir_for_set_number_spritesheet}" +) +output_dir_for_periodic_spritesheet = "test-output-periodic-spritesheet/" +output_uri_for_periodic_spritesheet = ( + f"gs://{output_bucket_name}/{output_dir_for_periodic_spritesheet}" +) +output_uri_for_concat = f"gs://{output_bucket_name}/test-output-concat/" + +preset = "preset/web-hd" +job_succeeded_state = "ProcessingState.SUCCEEDED" + + +@pytest.fixture(scope="module") +def test_bucket(): + storage_client = storage.Client() + bucket = storage_client.create_bucket(output_bucket_name) + + yield bucket + bucket.delete(force=True) + + +def test_create_job_from_preset(capsys, test_bucket): + create_job_from_preset.create_job_from_preset( + project_id, location, input_uri, output_uri_for_preset, preset + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + 
assert "Deleted job" in out + + +def test_create_job_from_template(capsys, test_bucket): + + job_template_name = ( + f"projects/{project_number}/locations/{location}/jobTemplates/{template_id}" + ) + + create_job_template.create_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert job_template_name in out + + create_job_from_template.create_job_from_template( + project_id, location, input_uri, output_uri_for_template, template_id + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + delete_job_template.delete_job_template(project_id, location, template_id) + out, _ = capsys.readouterr() + assert "Deleted job template" in out + + +def test_create_job_from_ad_hoc(capsys, test_bucket): + create_job_from_ad_hoc.create_job_from_ad_hoc( + project_id, location, input_uri, output_uri_for_adhoc + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, 
job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_static_overlay(capsys, test_bucket): + create_job_with_static_overlay.create_job_with_static_overlay( + project_id, + location, + input_uri, + overlay_image_uri, + output_uri_for_static_overlay, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_animated_overlay(capsys, test_bucket): + create_job_with_animated_overlay.create_job_with_animated_overlay( + project_id, + location, + input_uri, + overlay_image_uri, + output_uri_for_animated_overlay, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep(30) + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert 
job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_set_number_spritesheet(capsys, test_bucket): + create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( + project_id, + location, + input_uri, + output_uri_for_set_number_spritesheet, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert ( + job_name in out + ) # Get the job name so you can use it later to get the job and delete the job. + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. 
+ + _assert_job_state_succeeded(capsys, job_id) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_set_number_spritesheet + + small_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_set_number_spritesheet + + large_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_periodic_spritesheet(capsys, test_bucket): + create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( + project_id, + location, + input_uri, + output_uri_for_periodic_spritesheet, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert ( + job_name in out + ) # Get the job name so you can use it later to get the job and delete the job. + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. 
+ + _assert_job_state_succeeded(capsys, job_id) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_periodic_spritesheet + + small_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + _assert_file_in_bucket( + capsys, + test_bucket, + output_dir_for_periodic_spritesheet + + large_spritesheet_file_prefix + + spritesheet_file_suffix, + ) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +def test_create_job_with_concatenated_inputs(capsys, test_bucket): + create_job_with_concatenated_inputs.create_job_with_concatenated_inputs( + project_id, + location, + concat1_uri, + "0s", + "8.1s", + concat2_uri, + "3.5s", + "15s", + output_uri_for_concat, + ) + out, _ = capsys.readouterr() + job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" + assert job_name_prefix in out + + str_slice = out.split("/") + job_id = str_slice[len(str_slice) - 1].rstrip("\n") + job_name = f"projects/{project_number}/locations/{location}/jobs/{job_id}" + assert job_name in out + + get_job.get_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert job_name in out + + time.sleep( + 30 + ) # Transcoding jobs need time to complete. Once the job completes, check the job state. + + _assert_job_state_succeeded(capsys, job_id) + + list_jobs.list_jobs(project_id, location) + out, _ = capsys.readouterr() + assert job_name in out + + delete_job.delete_job(project_id, location, job_id) + out, _ = capsys.readouterr() + assert "Deleted job" in out + + +# Retrying up to 10 mins. 
+@backoff.on_exception(backoff.expo, AssertionError, max_time=600) +def _assert_job_state_succeeded(capsys, job_id): + try: + get_job_state.get_job_state(project_id, location, job_id) + except HttpError as err: + raise AssertionError(f"Could not get job state: {err.resp.status}") + + out, _ = capsys.readouterr() + assert job_succeeded_state in out + + +def _assert_file_in_bucket(capsys, test_bucket, directory_and_filename): + blob = test_bucket.blob(directory_and_filename) + assert blob.exists() diff --git a/samples/snippets/list_job_templates.py b/samples/snippets/list_job_templates.py new file mode 100644 index 0000000..020f7a3 --- /dev/null +++ b/samples/snippets/list_job_templates.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for listing job templates in a location. + +Example usage: + python list_job_templates.py --project_id --location +""" + +# [START transcoder_list_job_templates] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def list_job_templates(project_id, location): + """Lists all job templates in a location. + + Args: + project_id: The GCP project ID. 
+ location: The location of the templates.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + response = client.list_job_templates(parent=parent) + print("Job templates:") + for jobTemplate in response.job_templates: + print({jobTemplate.name}) + + return response + + +# [END transcoder_list_job_templates] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument( + "--location", help="The location of the templates.", required=True + ) + args = parser.parse_args() + list_job_templates(args.project_id, args.location) diff --git a/samples/snippets/list_jobs.py b/samples/snippets/list_jobs.py new file mode 100644 index 0000000..cf1fdbd --- /dev/null +++ b/samples/snippets/list_jobs.py @@ -0,0 +1,57 @@ +#!/usr/bin/env python + +# Copyright 2020 Google Inc. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google Cloud Transcoder sample for listing jobs in a location. + +Example usage: + python list_jobs.py --project_id --location +""" + +# [START transcoder_list_jobs] + +import argparse + +from google.cloud.video.transcoder_v1.services.transcoder_service import ( + TranscoderServiceClient, +) + + +def list_jobs(project_id, location): + """Lists all jobs in a location. + + Args: + project_id: The GCP project ID. 
+ location: The location of the jobs.""" + + client = TranscoderServiceClient() + + parent = f"projects/{project_id}/locations/{location}" + response = client.list_jobs(parent=parent) + print("Jobs:") + for job in response.jobs: + print({job.name}) + + return response + + +# [END transcoder_list_jobs] + +if __name__ == "__main__": + parser = argparse.ArgumentParser() + parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) + parser.add_argument("--location", help="The location of the jobs.", required=True) + args = parser.parse_args() + list_jobs(args.project_id, args.location) diff --git a/samples/snippets/requirements-test.txt b/samples/snippets/requirements-test.txt new file mode 100644 index 0000000..52c6682 --- /dev/null +++ b/samples/snippets/requirements-test.txt @@ -0,0 +1,3 @@ +backoff==1.11.1 +google-cloud-storage==1.43.0 +pytest==6.2.4 diff --git a/samples/snippets/requirements.txt b/samples/snippets/requirements.txt new file mode 100644 index 0000000..7011d0b --- /dev/null +++ b/samples/snippets/requirements.txt @@ -0,0 +1,3 @@ +google-api-python-client==2.34.0 +grpcio==1.43.0 +google-cloud-video-transcoder==1.2.1 From 38c540a773b76be890ed1602e97af50ae58dff23 Mon Sep 17 00:00:00 2001 From: Owl Bot Date: Sun, 16 Jan 2022 14:46:17 +0000 Subject: [PATCH 2/2] =?UTF-8?q?=F0=9F=A6=89=20Updates=20from=20OwlBot?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit See https://github.com/googleapis/repo-automation-bots/blob/main/packages/owl-bot/README.md --- samples/snippets/create_job_from_ad_hoc.py | 9 +- samples/snippets/create_job_from_preset.py | 10 +- samples/snippets/create_job_from_template.py | 4 +- .../create_job_with_animated_overlay.py | 29 +- .../create_job_with_concatenated_inputs.py | 14 +- ...te_job_with_periodic_images_spritesheet.py | 17 +- ..._job_with_set_number_images_spritesheet.py | 9 +- .../create_job_with_static_overlay.py | 18 +- samples/snippets/job_test.py | 10 +- 
samples/snippets/noxfile.py | 278 ++++++++++++++++++ 10 files changed, 307 insertions(+), 91 deletions(-) create mode 100644 samples/snippets/noxfile.py diff --git a/samples/snippets/create_job_from_ad_hoc.py b/samples/snippets/create_job_from_ad_hoc.py index 7fc6dce..2e08c61 100644 --- a/samples/snippets/create_job_from_ad_hoc.py +++ b/samples/snippets/create_job_from_ad_hoc.py @@ -100,9 +100,7 @@ def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", @@ -116,8 +114,5 @@ def create_job_from_ad_hoc(project_id, location, input_uri, output_uri): ) args = parser.parse_args() create_job_from_ad_hoc( - args.project_id, - args.location, - args.input_uri, - args.output_uri, + args.project_id, args.location, args.input_uri, args.output_uri, ) diff --git a/samples/snippets/create_job_from_preset.py b/samples/snippets/create_job_from_preset.py index 67c1672..3539b32 100644 --- a/samples/snippets/create_job_from_preset.py +++ b/samples/snippets/create_job_from_preset.py @@ -59,9 +59,7 @@ def create_job_from_preset(project_id, location, input_uri, output_uri, preset): parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", @@ -80,9 +78,5 @@ def create_job_from_preset(project_id, location, input_uri, output_uri, preset): ) args = parser.parse_args() create_job_from_preset( - args.project_id, - args.location, - args.input_uri, - 
args.output_uri, - args.preset, + args.project_id, args.location, args.input_uri, args.output_uri, args.preset, ) diff --git a/samples/snippets/create_job_from_template.py b/samples/snippets/create_job_from_template.py index 685c3f6..0a69704 100644 --- a/samples/snippets/create_job_from_template.py +++ b/samples/snippets/create_job_from_template.py @@ -59,9 +59,7 @@ def create_job_from_template(project_id, location, input_uri, output_uri, templa parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/create_job_with_animated_overlay.py b/samples/snippets/create_job_with_animated_overlay.py index a3f3cbb..a90c542 100644 --- a/samples/snippets/create_job_with_animated_overlay.py +++ b/samples/snippets/create_job_with_animated_overlay.py @@ -81,8 +81,7 @@ def create_job_with_animated_overlay( image=transcoder_v1.types.Overlay.Image( uri=overlay_image_uri, resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0, - y=0, + x=0, y=0, ), alpha=1, ), @@ -91,30 +90,20 @@ def create_job_with_animated_overlay( animation_fade=transcoder_v1.types.Overlay.AnimationFade( fade_type=transcoder_v1.types.Overlay.FadeType.FADE_IN, xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0.5, - y=0.5, - ), - start_time_offset=duration.Duration( - seconds=5, - ), - end_time_offset=duration.Duration( - seconds=10, + x=0.5, y=0.5, ), + start_time_offset=duration.Duration(seconds=5,), + end_time_offset=duration.Duration(seconds=10,), ), ), transcoder_v1.types.Overlay.Animation( animation_fade=transcoder_v1.types.Overlay.AnimationFade( fade_type=transcoder_v1.types.Overlay.FadeType.FADE_OUT, xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0.5, - y=0.5, - ), - 
start_time_offset=duration.Duration( - seconds=12, - ), - end_time_offset=duration.Duration( - seconds=15, + x=0.5, y=0.5, ), + start_time_offset=duration.Duration(seconds=12,), + end_time_offset=duration.Duration(seconds=15,), ), ), ], @@ -132,9 +121,7 @@ def create_job_with_animated_overlay( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/create_job_with_concatenated_inputs.py b/samples/snippets/create_job_with_concatenated_inputs.py index 4a64f88..0a2d3ad 100644 --- a/samples/snippets/create_job_with_concatenated_inputs.py +++ b/samples/snippets/create_job_with_concatenated_inputs.py @@ -80,14 +80,8 @@ def create_job_with_concatenated_inputs( job.output_uri = output_uri job.config = transcoder_v1.types.JobConfig( inputs=[ - transcoder_v1.types.Input( - key="input1", - uri=input1_uri, - ), - transcoder_v1.types.Input( - key="input2", - uri=input2_uri, - ), + transcoder_v1.types.Input(key="input1", uri=input1_uri,), + transcoder_v1.types.Input(key="input2", uri=input2_uri,), ], edit_list=[ transcoder_v1.types.EditAtom( @@ -141,9 +135,7 @@ def create_job_with_concatenated_inputs( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input1_uri", diff --git a/samples/snippets/create_job_with_periodic_images_spritesheet.py b/samples/snippets/create_job_with_periodic_images_spritesheet.py index a3f7af5..5028a27 100644 --- 
a/samples/snippets/create_job_with_periodic_images_spritesheet.py +++ b/samples/snippets/create_job_with_periodic_images_spritesheet.py @@ -88,18 +88,14 @@ def create_job_with_periodic_images_spritesheet( file_prefix="small-sprite-sheet", sprite_width_pixels=64, sprite_height_pixels=32, - interval=duration.Duration( - seconds=7, - ), + interval=duration.Duration(seconds=7,), ), # Generate a sprite sheet with 128x72px images. An image is taken every 7 seconds from the video. transcoder_v1.types.SpriteSheet( file_prefix="large-sprite-sheet", sprite_width_pixels=128, sprite_height_pixels=72, - interval=duration.Duration( - seconds=7, - ), + interval=duration.Duration(seconds=7,), ), ], ) @@ -114,9 +110,7 @@ def create_job_with_periodic_images_spritesheet( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", @@ -130,8 +124,5 @@ def create_job_with_periodic_images_spritesheet( ) args = parser.parse_args() create_job_with_periodic_images_spritesheet( - args.project_id, - args.location, - args.input_uri, - args.output_uri, + args.project_id, args.location, args.input_uri, args.output_uri, ) diff --git a/samples/snippets/create_job_with_set_number_images_spritesheet.py b/samples/snippets/create_job_with_set_number_images_spritesheet.py index b21b249..d416eec 100644 --- a/samples/snippets/create_job_with_set_number_images_spritesheet.py +++ b/samples/snippets/create_job_with_set_number_images_spritesheet.py @@ -113,9 +113,7 @@ def create_job_with_set_number_images_spritesheet( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - 
default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", @@ -129,8 +127,5 @@ def create_job_with_set_number_images_spritesheet( ) args = parser.parse_args() create_job_with_set_number_images_spritesheet( - args.project_id, - args.location, - args.input_uri, - args.output_uri, + args.project_id, args.location, args.input_uri, args.output_uri, ) diff --git a/samples/snippets/create_job_with_static_overlay.py b/samples/snippets/create_job_with_static_overlay.py index 37ce26b..5386a8a 100644 --- a/samples/snippets/create_job_with_static_overlay.py +++ b/samples/snippets/create_job_with_static_overlay.py @@ -81,8 +81,7 @@ def create_job_with_static_overlay( image=transcoder_v1.types.Overlay.Image( uri=overlay_image_uri, resolution=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=1, - y=0.5, + x=1, y=0.5, ), alpha=1, ), @@ -90,19 +89,14 @@ def create_job_with_static_overlay( transcoder_v1.types.Overlay.Animation( animation_static=transcoder_v1.types.Overlay.AnimationStatic( xy=transcoder_v1.types.Overlay.NormalizedCoordinate( - x=0, - y=0, - ), - start_time_offset=duration.Duration( - seconds=0, + x=0, y=0, ), + start_time_offset=duration.Duration(seconds=0,), ), ), transcoder_v1.types.Overlay.Animation( animation_end=transcoder_v1.types.Overlay.AnimationEnd( - start_time_offset=duration.Duration( - seconds=10, - ), + start_time_offset=duration.Duration(seconds=10,), ), ), ], @@ -120,9 +114,7 @@ def create_job_with_static_overlay( parser = argparse.ArgumentParser() parser.add_argument("--project_id", help="Your Cloud project ID.", required=True) parser.add_argument( - "--location", - help="The location to start this job in.", - default="us-central1", + "--location", help="The location to start this job in.", default="us-central1", ) parser.add_argument( "--input_uri", diff --git a/samples/snippets/job_test.py b/samples/snippets/job_test.py index bf54918..2d9a6ba 100644 --- 
a/samples/snippets/job_test.py +++ b/samples/snippets/job_test.py @@ -259,10 +259,7 @@ def test_create_job_with_animated_overlay(capsys, test_bucket): def test_create_job_with_set_number_spritesheet(capsys, test_bucket): create_job_with_set_number_images_spritesheet.create_job_with_set_number_images_spritesheet( - project_id, - location, - input_uri, - output_uri_for_set_number_spritesheet, + project_id, location, input_uri, output_uri_for_set_number_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" @@ -310,10 +307,7 @@ def test_create_job_with_set_number_spritesheet(capsys, test_bucket): def test_create_job_with_periodic_spritesheet(capsys, test_bucket): create_job_with_periodic_images_spritesheet.create_job_with_periodic_images_spritesheet( - project_id, - location, - input_uri, - output_uri_for_periodic_spritesheet, + project_id, location, input_uri, output_uri_for_periodic_spritesheet, ) out, _ = capsys.readouterr() job_name_prefix = f"projects/{project_number}/locations/{location}/jobs/" diff --git a/samples/snippets/noxfile.py b/samples/snippets/noxfile.py new file mode 100644 index 0000000..3bbef5d --- /dev/null +++ b/samples/snippets/noxfile.py @@ -0,0 +1,278 @@ +# Copyright 2019 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +from __future__ import print_function + +import glob +import os +from pathlib import Path +import sys +from typing import Callable, Dict, List, Optional + +import nox + + +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING +# DO NOT EDIT THIS FILE EVER! +# WARNING - WARNING - WARNING - WARNING - WARNING +# WARNING - WARNING - WARNING - WARNING - WARNING + +BLACK_VERSION = "black==19.10b0" + +# Copy `noxfile_config.py` to your directory and modify it instead. + +# `TEST_CONFIG` dict is a configuration hook that allows users to +# modify the test configurations. The values here should be in sync +# with `noxfile_config.py`. Users will copy `noxfile_config.py` into +# their directory and modify it. + +TEST_CONFIG = { + # You can opt out from the test for specific Python versions. + "ignored_versions": [], + # Old samples are opted out of enforcing Python type hints + # All new samples should feature them + "enforce_type_hints": False, + # An envvar key for determining the project id to use. Change it + # to 'BUILD_SPECIFIC_GCLOUD_PROJECT' if you want to opt in using a + # build specific Cloud project. You can also use your own string + # to use your own Cloud project. + "gcloud_project_env": "GOOGLE_CLOUD_PROJECT", + # 'gcloud_project_env': 'BUILD_SPECIFIC_GCLOUD_PROJECT', + # If you need to use a specific version of pip, + # change pip_version_override to the string representation + # of the version number, for example, "20.2.4" + "pip_version_override": None, + # A dictionary you want to inject into your test. Don't put any + # secrets here. These values will override predefined values. + "envs": {}, +} + + +try: + # Ensure we can import noxfile_config in the project's directory. 
+ sys.path.append(".") + from noxfile_config import TEST_CONFIG_OVERRIDE +except ImportError as e: + print("No user noxfile_config found: detail: {}".format(e)) + TEST_CONFIG_OVERRIDE = {} + +# Update the TEST_CONFIG with the user supplied values. +TEST_CONFIG.update(TEST_CONFIG_OVERRIDE) + + +def get_pytest_env_vars() -> Dict[str, str]: + """Returns a dict for pytest invocation.""" + ret = {} + + # Override the GCLOUD_PROJECT and the alias. + env_key = TEST_CONFIG["gcloud_project_env"] + # This should error out if not set. + ret["GOOGLE_CLOUD_PROJECT"] = os.environ[env_key] + + # Apply user supplied envs. + ret.update(TEST_CONFIG["envs"]) + return ret + + +# DO NOT EDIT - automatically generated. +# All versions used to test samples. +ALL_VERSIONS = ["3.6", "3.7", "3.8", "3.9", "3.10"] + +# Any default versions that should be ignored. +IGNORED_VERSIONS = TEST_CONFIG["ignored_versions"] + +TESTED_VERSIONS = sorted([v for v in ALL_VERSIONS if v not in IGNORED_VERSIONS]) + +INSTALL_LIBRARY_FROM_SOURCE = os.environ.get("INSTALL_LIBRARY_FROM_SOURCE", False) in ( + "True", + "true", +) + +# Error if a python version is missing +nox.options.error_on_missing_interpreters = True + +# +# Style Checks +# + + +def _determine_local_import_names(start_dir: str) -> List[str]: + """Determines all import names that should be considered "local". + + This is used when running the linter to insure that import order is + properly checked. + """ + file_ext_pairs = [os.path.splitext(path) for path in os.listdir(start_dir)] + return [ + basename + for basename, extension in file_ext_pairs + if extension == ".py" + or os.path.isdir(os.path.join(start_dir, basename)) + and basename not in ("__pycache__") + ] + + +# Linting with flake8. 
+# +# We ignore the following rules: +# E203: whitespace before ‘:’ +# E266: too many leading ‘#’ for block comment +# E501: line too long +# I202: Additional newline in a section of imports +# +# We also need to specify the rules which are ignored by default: +# ['E226', 'W504', 'E126', 'E123', 'W503', 'E24', 'E704', 'E121'] +FLAKE8_COMMON_ARGS = [ + "--show-source", + "--builtin=gettext", + "--max-complexity=20", + "--import-order-style=google", + "--exclude=.nox,.cache,env,lib,generated_pb2,*_pb2.py,*_pb2_grpc.py", + "--ignore=E121,E123,E126,E203,E226,E24,E266,E501,E704,W503,W504,I202", + "--max-line-length=88", +] + + +@nox.session +def lint(session: nox.sessions.Session) -> None: + if not TEST_CONFIG["enforce_type_hints"]: + session.install("flake8", "flake8-import-order") + else: + session.install("flake8", "flake8-import-order", "flake8-annotations") + + local_names = _determine_local_import_names(".") + args = FLAKE8_COMMON_ARGS + [ + "--application-import-names", + ",".join(local_names), + ".", + ] + session.run("flake8", *args) + + +# +# Black +# + + +@nox.session +def blacken(session: nox.sessions.Session) -> None: + session.install(BLACK_VERSION) + python_files = [path for path in os.listdir(".") if path.endswith(".py")] + + session.run("black", *python_files) + + +# +# Sample Tests +# + + +PYTEST_COMMON_ARGS = ["--junitxml=sponge_log.xml"] + + +def _session_tests( + session: nox.sessions.Session, post_install: Callable = None +) -> None: + # check for presence of tests + test_list = glob.glob("*_test.py") + glob.glob("test_*.py") + if len(test_list) == 0: + print("No tests found, skipping directory.") + else: + if TEST_CONFIG["pip_version_override"]: + pip_version = TEST_CONFIG["pip_version_override"] + session.install(f"pip=={pip_version}") + """Runs py.test for a particular project.""" + if os.path.exists("requirements.txt"): + if os.path.exists("constraints.txt"): + session.install("-r", "requirements.txt", "-c", "constraints.txt") + else: + 
session.install("-r", "requirements.txt") + + if os.path.exists("requirements-test.txt"): + if os.path.exists("constraints-test.txt"): + session.install( + "-r", "requirements-test.txt", "-c", "constraints-test.txt" + ) + else: + session.install("-r", "requirements-test.txt") + + if INSTALL_LIBRARY_FROM_SOURCE: + session.install("-e", _get_repo_root()) + + if post_install: + post_install(session) + + session.run( + "pytest", + *(PYTEST_COMMON_ARGS + session.posargs), + # Pytest will return 5 when no tests are collected. This can happen + # on travis where slow and flaky tests are excluded. + # See http://doc.pytest.org/en/latest/_modules/_pytest/main.html + success_codes=[0, 5], + env=get_pytest_env_vars(), + ) + + +@nox.session(python=ALL_VERSIONS) +def py(session: nox.sessions.Session) -> None: + """Runs py.test for a sample using the specified version of Python.""" + if session.python in TESTED_VERSIONS: + _session_tests(session) + else: + session.skip( + "SKIPPED: {} tests are disabled for this sample.".format(session.python) + ) + + +# +# Readmegen +# + + +def _get_repo_root() -> Optional[str]: + """ Returns the root folder of the project. """ + # Get root of this repository. Assume we don't have directories nested deeper than 10 items. 
+ p = Path(os.getcwd()) + for i in range(10): + if p is None: + break + if Path(p / ".git").exists(): + return str(p) + # .git is not available in repos cloned via Cloud Build + # setup.py is always in the library's root, so use that instead + # https://github.com/googleapis/synthtool/issues/792 + if Path(p / "setup.py").exists(): + return str(p) + p = p.parent + raise Exception("Unable to detect repository root.") + + +GENERATED_READMES = sorted([x for x in Path(".").rglob("*.rst.in")]) + + +@nox.session +@nox.parametrize("path", GENERATED_READMES) +def readmegen(session: nox.sessions.Session, path: str) -> None: + """(Re-)generates the readme for a sample.""" + session.install("jinja2", "pyyaml") + dir_ = os.path.dirname(path) + + if os.path.exists(os.path.join(dir_, "requirements.txt")): + session.install("-r", os.path.join(dir_, "requirements.txt")) + + in_file = os.path.join(dir_, "README.rst.in") + session.run( + "python", _get_repo_root() + "/scripts/readme-gen/readme_gen.py", in_file + ) pFad - Phonifier reborn