// Copyright 2023 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package google.cloud.video.transcoder.v1;

import "google/api/field_behavior.proto";
import "google/api/resource.proto";
import "google/protobuf/duration.proto";
import "google/protobuf/timestamp.proto";
import "google/rpc/status.proto";

option go_package = "cloud.google.com/go/video/transcoder/apiv1/transcoderpb;transcoderpb";
option java_multiple_files = true;
option java_outer_classname = "ResourcesProto";
option java_package = "com.google.cloud.video.transcoder.v1";

// Transcoding job resource.
message Job {
  option (google.api.resource) = {
    type: "transcoder.googleapis.com/Job"
    pattern: "projects/{project}/locations/{location}/jobs/{job}"
  };

  // The current state of the job.
  enum ProcessingState {
    // The processing state is not specified.
    PROCESSING_STATE_UNSPECIFIED = 0;

    // The job is enqueued and will be picked up for processing soon.
    PENDING = 1;

    // The job is being processed.
    RUNNING = 2;

    // The job has been completed successfully.
    SUCCEEDED = 3;

    // The job has failed. For additional information, see `failure_reason` and
    // `failure_details`.
    FAILED = 4;
  }

  // The processing mode of the job.
  enum ProcessingMode {
    // The job processing mode is not specified.
    PROCESSING_MODE_UNSPECIFIED = 0;

    // The job processing mode is interactive mode.
    // An interactive job is either run or rejected if quota does not allow
    // for it.
    PROCESSING_MODE_INTERACTIVE = 1;

    // The job processing mode is batch mode.
    // Batch mode allows queuing of jobs.
    PROCESSING_MODE_BATCH = 2;
  }

  // The optimization strategy of the job. The default is `AUTODETECT`.
  enum OptimizationStrategy {
    // The optimization strategy is not specified.
    OPTIMIZATION_STRATEGY_UNSPECIFIED = 0;

    // Prioritize job processing speed.
    AUTODETECT = 1;

    // Disable all optimizations.
    DISABLED = 2;
  }

  // The resource name of the job.
  // Format: `projects/{project_number}/locations/{location}/jobs/{job}`
  string name = 1;

  // Input only. Specify the `input_uri` to populate empty `uri` fields in each
  // element of `Job.config.inputs` or `JobTemplate.config.inputs` when using
  // a template. URI of the media. Input files must be at least 5 seconds in
  // duration and stored in Cloud Storage (for example,
  // `gs://bucket/inputs/file.mp4`). See [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string input_uri = 2 [(google.api.field_behavior) = INPUT_ONLY];

  // Input only. Specify the `output_uri` to populate an empty
  // `Job.config.output.uri` or `JobTemplate.config.output.uri` when using
  // a template. URI for the output file(s). For example,
  // `gs://my-bucket/outputs/`. See [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string output_uri = 3 [(google.api.field_behavior) = INPUT_ONLY];

  // Specify the `job_config` for the transcoding job. If you don't specify the
  // `job_config`, the API selects `templateId`; this template ID is set to
  // `preset/web-hd` by default. When you use a `template_id` to create a job,
  // the `Job.config` is populated by the `JobTemplate.config`.
  oneof job_config {
    // Input only. Specify the `template_id` to use for populating `Job.config`.
    // The default is `preset/web-hd`, which is the only supported preset.
    //
    // User defined JobTemplate: `{job_template_id}`
    string template_id = 4 [(google.api.field_behavior) = INPUT_ONLY];

    // The configuration for this job.
    JobConfig config = 5;
  }

  // Output only. The current state of the job.
  ProcessingState state = 8 [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the job was created.
  google.protobuf.Timestamp create_time = 12
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the transcoding started.
  google.protobuf.Timestamp start_time = 13
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Output only. The time the transcoding finished.
  google.protobuf.Timestamp end_time = 14
      [(google.api.field_behavior) = OUTPUT_ONLY];

  // Job time to live value in days, which takes effect after job completion.
  // The job is deleted automatically after the given TTL expires. Enter a
  // value between 1 and 90. The default is 30.
  int32 ttl_after_completion_days = 15;

  // The labels associated with this job. You can use these to organize and
  // group your jobs.
  map<string, string> labels = 16;

  // Output only. An error object that describes the reason for the failure.
  // This property is always present when `state` is `FAILED`.
  google.rpc.Status error = 17 [(google.api.field_behavior) = OUTPUT_ONLY];

  // The processing mode of the job.
  // The default is `PROCESSING_MODE_INTERACTIVE`.
  ProcessingMode mode = 20;

  // The processing priority of a batch job.
  // This field can only be set for batch mode jobs. The default value is 0.
  // This value cannot be negative. Higher values correspond to higher
  // priorities for the job.
  int32 batch_mode_priority = 21;

  // Optional. The optimization strategy of the job. The default is
  // `AUTODETECT`.
  OptimizationStrategy optimization = 22
      [(google.api.field_behavior) = OPTIONAL];
}
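
// Illustrative usage (not part of the API definition): a minimal `Job` in
// protobuf text format that relies on the default `preset/web-hd` template.
// The Cloud Storage bucket name is hypothetical. Only one of `template_id`
// or `config` may be set, because they share the `job_config` oneof.
//
//   input_uri: "gs://my-bucket/inputs/file.mp4"
//   output_uri: "gs://my-bucket/outputs/"
//   template_id: "preset/web-hd"
//   ttl_after_completion_days: 30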

// Transcoding job template resource.
message JobTemplate {
  option (google.api.resource) = {
    type: "transcoder.googleapis.com/JobTemplate"
    pattern: "projects/{project}/locations/{location}/jobTemplates/{job_template}"
  };

  // The resource name of the job template.
  // Format:
  // `projects/{project_number}/locations/{location}/jobTemplates/{job_template}`
  string name = 1;

  // The configuration for this template.
  JobConfig config = 2;

  // The labels associated with this job template. You can use these to
  // organize and group your job templates.
  map<string, string> labels = 3;
}

// Job configuration.
message JobConfig {
  // List of input assets stored in Cloud Storage.
  repeated Input inputs = 1;

  // List of `Edit atom`s. Defines the ultimate timeline of the resulting
  // file or manifest.
  repeated EditAtom edit_list = 2;

  // List of elementary streams.
  repeated ElementaryStream elementary_streams = 3;

  // List of multiplexing settings for output streams.
  repeated MuxStream mux_streams = 4;

  // List of output manifests.
  repeated Manifest manifests = 5;

  // Output configuration.
  Output output = 6;

  // List of ad breaks. Specifies where to insert ad break tags in the output
  // manifests.
  repeated AdBreak ad_breaks = 7;

  // Destination on Pub/Sub.
  PubsubDestination pubsub_destination = 8;

  // List of output sprite sheets.
  // Sprite sheets require at least one `VideoStream` in the `JobConfig`.
  repeated SpriteSheet sprite_sheets = 9;

  // List of overlays on the output video, in descending Z-order.
  repeated Overlay overlays = 10;

  // List of encryption configurations for the content.
  // Each configuration has an ID. Specify this ID in the
  // [MuxStream.encryption_id][google.cloud.video.transcoder.v1.MuxStream.encryption_id]
  // field to indicate the configuration to use for that `MuxStream` output.
  repeated Encryption encryptions = 11;
}
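
// Illustrative usage (not part of the API definition): a minimal `JobConfig`
// in protobuf text format producing a single H.264 MP4 output. The stream
// keys and the bucket name are hypothetical; values stay within the limits
// documented on the corresponding fields.
//
//   elementary_streams {
//     key: "video-stream0"
//     video_stream {
//       h264 {
//         width_pixels: 1280
//         height_pixels: 720
//         frame_rate: 30
//         bitrate_bps: 2500000
//       }
//     }
//   }
//   elementary_streams {
//     key: "audio-stream0"
//     audio_stream { codec: "aac" bitrate_bps: 64000 }
//   }
//   mux_streams {
//     key: "hd"
//     container: "mp4"
//     elementary_streams: "video-stream0"
//     elementary_streams: "audio-stream0"
//   }
//   output { uri: "gs://my-bucket/outputs/" }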

// Input asset.
message Input {
  // A unique key for this input. Must be specified when using advanced
  // mapping and edit lists.
  string key = 1;

  // URI of the media. Input files must be at least 5 seconds in duration and
  // stored in Cloud Storage (for example, `gs://bucket/inputs/file.mp4`).
  // If empty, the value is populated from `Job.input_uri`. See
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string uri = 2;

  // Preprocessing configurations.
  PreprocessingConfig preprocessing_config = 3;
}

// Location of output file(s) in a Cloud Storage bucket.
message Output {
  // URI for the output file(s). For example, `gs://my-bucket/outputs/`.
  // If empty, the value is populated from `Job.output_uri`. See
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats).
  string uri = 1;
}

// Edit atom.
message EditAtom {
  // A unique key for this atom. Must be specified when using advanced
  // mapping.
  string key = 1;

  // List of `Input.key`s identifying files that should be used in this atom.
  // The listed `inputs` must have the same timeline.
  repeated string inputs = 2;

  // End time in seconds for the atom, relative to the input file timeline.
  // When `end_time_offset` is not specified, the `inputs` are used until
  // the end of the atom.
  google.protobuf.Duration end_time_offset = 3;

  // Start time in seconds for the atom, relative to the input file timeline.
  // The default is `0s`.
  google.protobuf.Duration start_time_offset = 4;
}
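
// Illustrative usage (not part of the API definition): an edit list entry in
// protobuf text format that keeps only the portion of one input between 10s
// and 70s. The keys are hypothetical.
//
//   edit_list {
//     key: "atom0"
//     inputs: "input0"
//     start_time_offset { seconds: 10 }
//     end_time_offset { seconds: 70 }
//   }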

// Ad break.
message AdBreak {
  // Start time in seconds for the ad break, relative to the output file
  // timeline. The default is `0s`.
  google.protobuf.Duration start_time_offset = 1;
}

// Encoding of an input file such as an audio, video, or text track.
// Elementary streams must be packaged before
// mapping and sharing between different output formats.
message ElementaryStream {
  // A unique key for this elementary stream.
  string key = 4;

  // Encoding of an audio, video, or text track.
  oneof elementary_stream {
    // Encoding of a video stream.
    VideoStream video_stream = 1;

    // Encoding of an audio stream.
    AudioStream audio_stream = 2;

    // Encoding of a text stream. For example, closed captions or subtitles.
    TextStream text_stream = 3;
  }
}

// Multiplexing settings for output stream.
message MuxStream {
  // A unique key for this multiplexed stream. HLS media manifests will be
  // named `MuxStream.key` with the `.m3u8` extension suffix.
  string key = 1;

  // The name of the generated file. The default is `MuxStream.key` with the
  // extension suffix corresponding to the `MuxStream.container`.
  //
  // Individual segments also have an incremental 10-digit zero-padded suffix
  // starting from 0 before the extension, such as `mux_stream0000000123.ts`.
  string file_name = 2;

  // The container format. The default is `mp4`.
  //
  // Supported container formats:
  //
  // - `ts`
  // - `fmp4` - the corresponding file extension is `.m4s`
  // - `mp4`
  // - `vtt`
  //
  // See also:
  // [Supported input and output
  // formats](https://cloud.google.com/transcoder/docs/concepts/supported-input-and-output-formats)
  string container = 3;

  // List of `ElementaryStream.key`s multiplexed in this stream.
  repeated string elementary_streams = 4;

  // Segment settings for `ts`, `fmp4` and `vtt`.
  SegmentSettings segment_settings = 5;

  // Identifier of the encryption configuration to use. If omitted, output will
  // be unencrypted.
  string encryption_id = 7;
}

// Manifest configuration.
message Manifest {
  // The manifest type, which corresponds to the adaptive streaming format used.
  enum ManifestType {
    // The manifest type is not specified.
    MANIFEST_TYPE_UNSPECIFIED = 0;

    // Create an HLS manifest. The corresponding file extension is `.m3u8`.
    HLS = 1;

    // Create an MPEG-DASH manifest. The corresponding file extension is `.mpd`.
    DASH = 2;
  }

  // `DASH` manifest configuration.
  message DashConfig {
    // The segment reference scheme for a `DASH` manifest.
    enum SegmentReferenceScheme {
      // The segment reference scheme is not specified.
      SEGMENT_REFERENCE_SCHEME_UNSPECIFIED = 0;

      // Lists the URLs of media files for each segment.
      SEGMENT_LIST = 1;

      // Lists each segment from a template with the `$Number$` variable.
      SEGMENT_TEMPLATE_NUMBER = 2;
    }

    // The segment reference scheme for a `DASH` manifest. The default is
    // `SEGMENT_LIST`.
    SegmentReferenceScheme segment_reference_scheme = 1;
  }

  // The name of the generated file. The default is `manifest` with the
  // extension suffix corresponding to the `Manifest.type`.
  string file_name = 1;

  // Required. Type of the manifest.
  ManifestType type = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. List of user-given `MuxStream.key`s that should appear in this
  // manifest.
  //
  // When `Manifest.type` is `HLS`, a media manifest with name `MuxStream.key`
  // and `.m3u8` extension is generated for each element of the
  // `Manifest.mux_streams`.
  repeated string mux_streams = 3 [(google.api.field_behavior) = REQUIRED];

  // Specifies the manifest configuration.
  oneof manifest_config {
    // `DASH` manifest configuration.
    DashConfig dash = 4;
  }
}
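
// Illustrative usage (not part of the API definition): HLS and DASH manifests
// in protobuf text format, each referencing the same hypothetical `MuxStream`
// keys `media-sd` and `media-hd`.
//
//   manifests {
//     file_name: "manifest.m3u8"
//     type: HLS
//     mux_streams: "media-sd"
//     mux_streams: "media-hd"
//   }
//   manifests {
//     file_name: "manifest.mpd"
//     type: DASH
//     mux_streams: "media-sd"
//     mux_streams: "media-hd"
//   }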

// A Pub/Sub destination.
message PubsubDestination {
  // The name of the Pub/Sub topic to publish job completion notification
  // to. For example: `projects/{project}/topics/{topic}`.
  string topic = 1;
}

// Sprite sheet configuration.
message SpriteSheet {
  // Format type. The default is `jpeg`.
  //
  // Supported formats:
  //
  // - `jpeg`
  string format = 1;

  // Required. File name prefix for the generated sprite sheets.
  //
  // Each sprite sheet has an incremental 10-digit zero-padded suffix starting
  // from 0 before the extension, such as `sprite_sheet0000000123.jpeg`.
  string file_prefix = 2 [(google.api.field_behavior) = REQUIRED];

  // Required. The width of a sprite in pixels. Must be an even integer. To
  // preserve the source aspect ratio, set the
  // [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
  // field or the
  // [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
  // field, but not both (the API will automatically calculate the missing
  // field).
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the width, in pixels, per the horizontal ASR. The API calculates
  // the height per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 sprite_width_pixels = 3 [(google.api.field_behavior) = REQUIRED];

  // Required. The height of a sprite in pixels. Must be an even integer. To
  // preserve the source aspect ratio, set the
  // [SpriteSheet.sprite_height_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_height_pixels]
  // field or the
  // [SpriteSheet.sprite_width_pixels][google.cloud.video.transcoder.v1.SpriteSheet.sprite_width_pixels]
  // field, but not both (the API will automatically calculate the missing
  // field).
  //
  // For portrait videos that contain horizontal ASR and rotation metadata,
  // provide the height, in pixels, per the horizontal ASR. The API calculates
  // the width per the horizontal ASR. The API detects any rotation metadata
  // and swaps the requested height and width for the output.
  int32 sprite_height_pixels = 4 [(google.api.field_behavior) = REQUIRED];

  // The maximum number of sprites per row in a sprite sheet. The default is 0,
  // which indicates no maximum limit.
  int32 column_count = 5;

  // The maximum number of rows per sprite sheet. When the sprite sheet is full,
  // a new sprite sheet is created. The default is 0, which indicates no maximum
  // limit.
  int32 row_count = 6;

  // Start time in seconds, relative to the output file timeline. Determines the
  // first sprite to pick. The default is `0s`.
  google.protobuf.Duration start_time_offset = 7;

  // End time in seconds, relative to the output file timeline. When
  // `end_time_offset` is not specified, the sprites are generated until the end
  // of the output file.
  google.protobuf.Duration end_time_offset = 8;

  // Specify either the total number of sprites or the interval at which to
  // create sprites.
  oneof extraction_strategy {
    // Total number of sprites. Create the specified number of sprites
    // distributed evenly across the timeline of the output media. The default
    // is 100.
    int32 total_count = 9;

    // Starting from `0s`, create sprites at regular intervals. Specify the
    // interval value in seconds.
    google.protobuf.Duration interval = 10;
  }

  // The quality of the generated sprite sheet. Enter a value between 1
  // and 100, where 1 is the lowest quality and 100 is the highest quality.
  // The default is 100. A high quality value corresponds to a low image data
  // compression ratio.
  int32 quality = 11;
}
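
// Illustrative usage (not part of the API definition): a sprite sheet in
// protobuf text format that captures one 128x72 sprite every 10 seconds.
// The file prefix is hypothetical.
//
//   sprite_sheets {
//     file_prefix: "sprite_sheet"
//     sprite_width_pixels: 128
//     sprite_height_pixels: 72
//     interval { seconds: 10 }
//   }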

// Overlay configuration.
message Overlay {
  // 2D normalized coordinates. Default: `{0.0, 0.0}`
  message NormalizedCoordinate {
    // Normalized x coordinate.
    double x = 1;

    // Normalized y coordinate.
    double y = 2;
  }

  // Overlaid image.
  message Image {
    // Required. URI of the image in Cloud Storage. For example,
    // `gs://bucket/inputs/image.png`. Only PNG and JPEG images are supported.
    string uri = 1 [(google.api.field_behavior) = REQUIRED];

    // Normalized image resolution, based on output video resolution. Valid
    // values: `0.0`–`1.0`. To respect the original image aspect ratio, set
    // either `x` or `y` to `0.0`. To use the original image resolution, set
    // both `x` and `y` to `0.0`.
    NormalizedCoordinate resolution = 2;

    // Target image opacity. Valid values are from `1.0` (solid, default) to
    // `0.0` (transparent), exclusive. Set this to a value greater than `0.0`.
    double alpha = 3;
  }

  // Display static overlay object.
  message AnimationStatic {
    // Normalized coordinates based on output video resolution. Valid
    // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
    // object. For example, use the x and y coordinates {0,0} to position the
    // top-left corner of the overlay animation in the top-left corner of the
    // output video.
    NormalizedCoordinate xy = 1;

    // The time to start displaying the overlay object, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 2;
  }

  // Fade type for the overlay: `FADE_IN` or `FADE_OUT`.
  enum FadeType {
    // The fade type is not specified.
    FADE_TYPE_UNSPECIFIED = 0;

    // Fade the overlay object into view.
    FADE_IN = 1;

    // Fade the overlay object out of view.
    FADE_OUT = 2;
  }

  // Display overlay object with fade animation.
  message AnimationFade {
    // Required. Type of fade animation: `FADE_IN` or `FADE_OUT`.
    FadeType fade_type = 1 [(google.api.field_behavior) = REQUIRED];

    // Normalized coordinates based on output video resolution. Valid
    // values: `0.0`–`1.0`. `xy` is the upper-left coordinate of the overlay
    // object. For example, use the x and y coordinates {0,0} to position the
    // top-left corner of the overlay animation in the top-left corner of the
    // output video.
    NormalizedCoordinate xy = 2;

    // The time to start the fade animation, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 3;

    // The time to end the fade animation, in seconds. Default:
    // `start_time_offset` + 1s
    google.protobuf.Duration end_time_offset = 4;
  }

  // End previous overlay animation from the video. Without AnimationEnd, the
  // overlay object will keep the state of the previous animation until the
  // end of the video.
  message AnimationEnd {
    // The time to end the overlay object, in seconds. Default: 0
    google.protobuf.Duration start_time_offset = 1;
  }

  // Animation types.
  message Animation {
    // Animations can be static or fade, or they can end the previous animation.
    oneof animation_type {
      // Display static overlay object.
      AnimationStatic animation_static = 1;

      // Display overlay object with fade animation.
      AnimationFade animation_fade = 2;

      // End previous animation.
      AnimationEnd animation_end = 3;
    }
  }

  // Image overlay.
  Image image = 1;

  // List of animations. The list should be chronological, without any time
  // overlap.
  repeated Animation animations = 2;
}
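
// Illustrative usage (not part of the API definition): an overlay in protobuf
// text format that fades a watermark in at 5s and out at 10s, anchored near
// the top-left corner. The image URI is hypothetical.
//
//   overlays {
//     image {
//       uri: "gs://my-bucket/inputs/logo.png"
//       resolution { x: 0.2 y: 0 }
//       alpha: 0.8
//     }
//     animations {
//       animation_fade {
//         fade_type: FADE_IN
//         xy { x: 0.05 y: 0.05 }
//         start_time_offset { seconds: 5 }
//         end_time_offset { seconds: 6 }
//       }
//     }
//     animations {
//       animation_fade {
//         fade_type: FADE_OUT
//         xy { x: 0.05 y: 0.05 }
//         start_time_offset { seconds: 10 }
//         end_time_offset { seconds: 11 }
//       }
//     }
//   }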

// Preprocessing configurations.
message PreprocessingConfig {
  // Color preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Color {
    // Control color saturation of the video. Enter a value between -1 and 1,
    // where -1 is fully desaturated and 1 is maximum saturation. 0 is no
    // change. The default is 0.
    double saturation = 1;

    // Control black and white contrast of the video. Enter a value between -1
    // and 1, where -1 is minimum contrast and 1 is maximum contrast. 0 is no
    // change. The default is 0.
    double contrast = 2;

    // Control brightness of the video. Enter a value between -1 and 1, where -1
    // is minimum brightness and 1 is maximum brightness. 0 is no change. The
    // default is 0.
    double brightness = 3;
  }

  // Denoise preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Denoise {
    // Set strength of the denoise. Enter a value between 0 and 1. The higher
    // the value, the smoother the image. 0 is no denoising. The default is 0.
    double strength = 1;

    // Set the denoiser mode. The default is `standard`.
    //
    // Supported denoiser modes:
    //
    // - `standard`
    // - `grain`
    string tune = 2;
  }

  // Deblock preprocessing configuration.
  //
  // **Note:** This configuration is not supported.
  message Deblock {
    // Set strength of the deblocker. Enter a value between 0 and 1. The higher
    // the value, the stronger the block removal. 0 is no deblocking. The
    // default is 0.
    double strength = 1;

    // Enable deblocker. The default is `false`.
    bool enabled = 2;
  }

  // Audio preprocessing configuration.
  message Audio {
    // Specify audio loudness normalization in loudness units relative to full
    // scale (LUFS). Enter a value between -24 and 0 (the default), where:
    //
    // *   -24 is the Advanced Television Systems Committee (ATSC A/85) standard
    // *   -23 is the EU R128 broadcast standard
    // *   -19 is the prior standard for online mono audio
    // *   -18 is the ReplayGain standard
    // *   -16 is the prior standard for stereo audio
    // *   -14 is the new online audio standard recommended by Spotify, as well
    //     as Amazon Echo
    // *   0 disables normalization
    double lufs = 1;

    // Enable boosting high frequency components. The default is `false`.
    //
    // **Note:** This field is not supported.
    bool high_boost = 2;

    // Enable boosting low frequency components. The default is `false`.
    //
    // **Note:** This field is not supported.
    bool low_boost = 3;
  }

  // Video cropping configuration for the input video. The cropped input video
  // is scaled to match the output resolution.
  message Crop {
    // The number of pixels to crop from the top. The default is 0.
    int32 top_pixels = 1;

    // The number of pixels to crop from the bottom. The default is 0.
    int32 bottom_pixels = 2;

    // The number of pixels to crop from the left. The default is 0.
    int32 left_pixels = 3;

    // The number of pixels to crop from the right. The default is 0.
    int32 right_pixels = 4;
  }

  // Pad filter configuration for the input video. The padded input video
  // is scaled after padding with black to match the output resolution.
  message Pad {
    // The number of pixels to add to the top. The default is 0.
    int32 top_pixels = 1;

    // The number of pixels to add to the bottom. The default is 0.
    int32 bottom_pixels = 2;

    // The number of pixels to add to the left. The default is 0.
    int32 left_pixels = 3;

    // The number of pixels to add to the right. The default is 0.
    int32 right_pixels = 4;
  }

  // Deinterlace configuration for input video.
  message Deinterlace {
    // Yet Another Deinterlacing Filter Configuration.
    message YadifConfig {
      // Specifies the deinterlacing mode to adopt.
      // The default is `send_frame`.
      // Supported values:
      //
      // - `send_frame`: Output one frame for each frame
      // - `send_field`: Output one frame for each field
      string mode = 1;

      // Disable spatial interlacing.
      // The default is `false`.
      bool disable_spatial_interlacing = 2;

      // The picture field parity assumed for the input interlaced video.
      // The default is `auto`.
      // Supported values:
      //
      // - `tff`: Assume the top field is first
      // - `bff`: Assume the bottom field is first
      // - `auto`: Enable automatic detection of field parity
      string parity = 3;

      // Deinterlace all frames rather than just the frames identified as
      // interlaced. The default is `false`.
      bool deinterlace_all_frames = 4;
    }

    // Bob Weaver Deinterlacing Filter Configuration.
    message BwdifConfig {
      // Specifies the deinterlacing mode to adopt.
      // The default is `send_frame`.
      // Supported values:
      //
      // - `send_frame`: Output one frame for each frame
      // - `send_field`: Output one frame for each field
      string mode = 1;

      // The picture field parity assumed for the input interlaced video.
      // The default is `auto`.
      // Supported values:
      //
      // - `tff`: Assume the top field is first
      // - `bff`: Assume the bottom field is first
      // - `auto`: Enable automatic detection of field parity
      string parity = 2;

      // Deinterlace all frames rather than just the frames identified as
      // interlaced. The default is `false`.
      bool deinterlace_all_frames = 3;
    }

    // Specify the video deinterlacing filter. The default is `yadif`.
    oneof deinterlacing_filter {
      // Specifies the Yet Another Deinterlacing Filter Configuration.
      YadifConfig yadif = 1;

      // Specifies the Bob Weaver Deinterlacing Filter Configuration.
      BwdifConfig bwdif = 2;
    }
  }

  // Color preprocessing configuration.
  Color color = 1;

  // Denoise preprocessing configuration.
  Denoise denoise = 2;

  // Deblock preprocessing configuration.
  Deblock deblock = 3;

  // Audio preprocessing configuration.
  Audio audio = 4;

  // Specify the video cropping configuration.
  Crop crop = 5;

  // Specify the video pad filter configuration.
  Pad pad = 6;

  // Specify the video deinterlace configuration.
  Deinterlace deinterlace = 7;
}
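
// Illustrative usage (not part of the API definition): a preprocessing
// configuration in protobuf text format that crops 140 pixels from the top
// and bottom of the input and normalizes audio loudness to the EU R128 level.
//
//   preprocessing_config {
//     crop { top_pixels: 140 bottom_pixels: 140 }
//     audio { lufs: -23 }
//   }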

// Video stream resource.
message VideoStream {
  // H264 codec settings.
  message H264CodecSettings {
    // The width of the video in pixels. Must be an even integer.
    // When not specified, the width is adjusted to match the specified height
    // and input aspect ratio. If both are omitted, the input width is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the width, in pixels, per the horizontal ASR. The API calculates
    // the height per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 width_pixels = 1;

    // The height of the video in pixels. Must be an even integer.
    // When not specified, the height is adjusted to match the specified width
    // and input aspect ratio. If both are omitted, the input height is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the height, in pixels, per the horizontal ASR. The API calculates
    // the width per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 height_pixels = 2;

    // Required. The target video frame rate in frames per second (FPS). Must be
    // less than or equal to 120. Will default to the input frame rate if larger
    // than the input frame rate. The API will generate an output FPS that is
    // divisible by the input FPS, and smaller or equal to the target FPS. See
    // [Calculating frame
    // rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
    // more information.
    double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];

    // Required. The video bitrate in bits per second. The minimum value is
    // 1,000. The maximum value is 800,000,000.
    int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];

    // Pixel format to use. The default is `yuv420p`.
    //
    // Supported pixel formats:
    //
    // - `yuv420p` pixel format
    // - `yuv422p` pixel format
    // - `yuv444p` pixel format
    // - `yuv420p10` 10-bit HDR pixel format
    // - `yuv422p10` 10-bit HDR pixel format
    // - `yuv444p10` 10-bit HDR pixel format
    // - `yuv420p12` 12-bit HDR pixel format
    // - `yuv422p12` 12-bit HDR pixel format
    // - `yuv444p12` 12-bit HDR pixel format
    string pixel_format = 5;

    // Specify the `rate_control_mode`. The default is `vbr`.
    //
    // Supported rate control modes:
    //
    // - `vbr` - variable bitrate
    // - `crf` - constant rate factor
    string rate_control_mode = 6;

    // Target CRF level. Must be between 10 and 36, where 10 is the highest
    // quality and 36 is the most efficient compression. The default is 21.
    int32 crf_level = 7;

    // Specifies whether an open Group of Pictures (GOP) structure should be
    // allowed or not. The default is `false`.
    bool allow_open_gop = 8;

    // GOP mode can be either by frame count or duration.
    oneof gop_mode {
      // Select the GOP size based on the specified frame count. Must be greater
      // than zero.
      int32 gop_frame_count = 9;

      // Select the GOP size based on the specified duration. The default is
      // `3s`. Note that `gopDuration` must be less than or equal to
      // [`segmentDuration`](#SegmentSettings), and
      // [`segmentDuration`](#SegmentSettings) must be divisible by
      // `gopDuration`.
      google.protobuf.Duration gop_duration = 10;
    }

    // Use two-pass encoding strategy to achieve better video quality.
    // `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
    bool enable_two_pass = 11;

    // Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
    // greater than zero. The default is equal to `VideoStream.bitrate_bps`.
    int32 vbv_size_bits = 12;

    // Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
    // Must be greater than zero. The default is equal to 90% of
    // `VideoStream.vbv_size_bits`.
    int32 vbv_fullness_bits = 13;

    // The entropy coder to use. The default is `cabac`.
    //
    // Supported entropy coders:
    //
    // - `cavlc`
    // - `cabac`
    string entropy_coder = 14;

    // Allow B-pyramid for reference frame selection. This may not be supported
    // on all decoders. The default is `false`.
    bool b_pyramid = 15;

    // The number of consecutive B-frames. Must be greater than or equal to
    // zero. Must be less than `VideoStream.gop_frame_count` if set. The default
    // is 0.
    int32 b_frame_count = 16;

    // Specify the intensity of the adaptive quantizer (AQ). Must be between 0
    // and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
    // higher value equals a lower bitrate but smoother image. The default is 0.
    double aq_strength = 17;

    // Enforces the specified codec profile. The following profiles are
    // supported:
    //
    // *   `baseline`
    // *   `main`
    // *   `high` (default)
    //
    // The available options are
    // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H264CodecSettings`
    // message.
    string profile = 18;

    // Enforces the specified codec tune. The available options are
    // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Tune).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H264CodecSettings`
    // message.
    string tune = 19;

    // Enforces the specified codec preset. The default is `veryfast`. The
    // available options are
    // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.264#Preset).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H264CodecSettings`
    // message.
    string preset = 20;
  }

  // H265 codec settings.
  message H265CodecSettings {
    // The width of the video in pixels. Must be an even integer.
    // When not specified, the width is adjusted to match the specified height
    // and input aspect ratio. If both are omitted, the input width is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the width, in pixels, per the horizontal ASR. The API calculates
    // the height per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 width_pixels = 1;

    // The height of the video in pixels. Must be an even integer.
    // When not specified, the height is adjusted to match the specified width
    // and input aspect ratio. If both are omitted, the input height is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the height, in pixels, per the horizontal ASR. The API calculates
    // the width per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 height_pixels = 2;

    // Required. The target video frame rate in frames per second (FPS). Must be
    // less than or equal to 120. Will default to the input frame rate if larger
    // than the input frame rate. The API will generate an output FPS that is
    // divisible by the input FPS, and smaller or equal to the target FPS. See
    // [Calculating frame
    // rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
    // more information.
    double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];

    // Required. The video bitrate in bits per second. The minimum value is
    // 1,000. The maximum value is 800,000,000.
    int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];

    // Pixel format to use. The default is `yuv420p`.
    //
    // Supported pixel formats:
    //
    // - `yuv420p` pixel format
    // - `yuv422p` pixel format
    // - `yuv444p` pixel format
    // - `yuv420p10` 10-bit HDR pixel format
    // - `yuv422p10` 10-bit HDR pixel format
    // - `yuv444p10` 10-bit HDR pixel format
    // - `yuv420p12` 12-bit HDR pixel format
    // - `yuv422p12` 12-bit HDR pixel format
    // - `yuv444p12` 12-bit HDR pixel format
    string pixel_format = 5;

    // Specify the `rate_control_mode`. The default is `vbr`.
    //
    // Supported rate control modes:
    //
    // - `vbr` - variable bitrate
    // - `crf` - constant rate factor
    string rate_control_mode = 6;

    // Target CRF level. Must be between 10 and 36, where 10 is the highest
    // quality and 36 is the most efficient compression. The default is 21.
    int32 crf_level = 7;

    // Specifies whether an open Group of Pictures (GOP) structure should be
    // allowed or not. The default is `false`.
    bool allow_open_gop = 8;

    // GOP mode can be either by frame count or duration.
    oneof gop_mode {
      // Select the GOP size based on the specified frame count. Must be greater
      // than zero.
      int32 gop_frame_count = 9;

      // Select the GOP size based on the specified duration. The default is
      // `3s`. Note that `gopDuration` must be less than or equal to
      // [`segmentDuration`](#SegmentSettings), and
      // [`segmentDuration`](#SegmentSettings) must be divisible by
      // `gopDuration`.
      google.protobuf.Duration gop_duration = 10;
    }

    // Use two-pass encoding strategy to achieve better video quality.
    // `VideoStream.rate_control_mode` must be `vbr`. The default is `false`.
    bool enable_two_pass = 11;

    // Size of the Video Buffering Verifier (VBV) buffer in bits. Must be
    // greater than zero. The default is equal to `VideoStream.bitrate_bps`.
    int32 vbv_size_bits = 12;

    // Initial fullness of the Video Buffering Verifier (VBV) buffer in bits.
    // Must be greater than zero. The default is equal to 90% of
    // `VideoStream.vbv_size_bits`.
    int32 vbv_fullness_bits = 13;

    // Allow B-pyramid for reference frame selection. This may not be supported
    // on all decoders. The default is `false`.
    bool b_pyramid = 14;

    // The number of consecutive B-frames. Must be greater than or equal to
    // zero. Must be less than `VideoStream.gop_frame_count` if set. The default
    // is 0.
    int32 b_frame_count = 15;

    // Specify the intensity of the adaptive quantizer (AQ). Must be between 0
    // and 1, where 0 disables the quantizer and 1 maximizes the quantizer. A
    // higher value equals a lower bitrate but smoother image. The default is 0.
    double aq_strength = 16;

    // Enforces the specified codec profile. The following profiles are
    // supported:
    //
    // *   8-bit profiles
    //     *   `main` (default)
    //     *   `main-intra`
    //     *   `mainstillpicture`
    // *   10-bit profiles
    //     *   `main10` (default)
    //     *   `main10-intra`
    //     *   `main422-10`
    //     *   `main422-10-intra`
    //     *   `main444-10`
    //     *   `main444-10-intra`
    // *   12-bit profiles
    //     *   `main12` (default)
    //     *   `main12-intra`
    //     *   `main422-12`
    //     *   `main422-12-intra`
    //     *   `main444-12`
    //     *   `main444-12-intra`
    //
    // The available options are
    // [FFmpeg-compatible](https://x265.readthedocs.io/).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H265CodecSettings`
    // message.
    string profile = 17;

    // Enforces the specified codec tune. The available options are
    // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H265CodecSettings`
    // message.
    string tune = 18;

    // Enforces the specified codec preset. The default is `veryfast`. The
    // available options are
    // [FFmpeg-compatible](https://trac.ffmpeg.org/wiki/Encode/H.265).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `H265CodecSettings`
    // message.
    string preset = 19;
  }

  // VP9 codec settings.
  message Vp9CodecSettings {
    // The width of the video in pixels. Must be an even integer.
    // When not specified, the width is adjusted to match the specified height
    // and input aspect ratio. If both are omitted, the input width is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the width, in pixels, per the horizontal ASR. The API calculates
    // the height per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 width_pixels = 1;

    // The height of the video in pixels. Must be an even integer.
    // When not specified, the height is adjusted to match the specified width
    // and input aspect ratio. If both are omitted, the input height is used.
    //
    // For portrait videos that contain horizontal ASR and rotation metadata,
    // provide the height, in pixels, per the horizontal ASR. The API calculates
    // the width per the horizontal ASR. The API detects any rotation metadata
    // and swaps the requested height and width for the output.
    int32 height_pixels = 2;

    // Required. The target video frame rate in frames per second (FPS). Must be
    // less than or equal to 120. Will default to the input frame rate if larger
    // than the input frame rate. The API will generate an output FPS that is
    // divisible by the input FPS, and smaller or equal to the target FPS. See
    // [Calculating frame
    // rate](https://cloud.google.com/transcoder/docs/concepts/frame-rate) for
    // more information.
    double frame_rate = 3 [(google.api.field_behavior) = REQUIRED];

    // Required. The video bitrate in bits per second. The minimum value is
    // 1,000. The maximum value is 480,000,000.
    int32 bitrate_bps = 4 [(google.api.field_behavior) = REQUIRED];

    // Pixel format to use. The default is `yuv420p`.
    //
    // Supported pixel formats:
    //
    // - `yuv420p` pixel format
    // - `yuv422p` pixel format
    // - `yuv444p` pixel format
    // - `yuv420p10` 10-bit HDR pixel format
    // - `yuv422p10` 10-bit HDR pixel format
    // - `yuv444p10` 10-bit HDR pixel format
    // - `yuv420p12` 12-bit HDR pixel format
    // - `yuv422p12` 12-bit HDR pixel format
    // - `yuv444p12` 12-bit HDR pixel format
    string pixel_format = 5;

    // Specify the `rate_control_mode`. The default is `vbr`.
    //
    // Supported rate control modes:
    //
    // - `vbr` - variable bitrate
    string rate_control_mode = 6;

    // Target CRF level. Must be between 10 and 36, where 10 is the highest
    // quality and 36 is the most efficient compression. The default is 21.
    //
    // **Note:** This field is not supported.
    int32 crf_level = 7;

    // GOP mode can be either by frame count or duration.
    oneof gop_mode {
      // Select the GOP size based on the specified frame count. Must be greater
      // than zero.
      int32 gop_frame_count = 8;

      // Select the GOP size based on the specified duration. The default is
      // `3s`. Note that `gopDuration` must be less than or equal to
      // [`segmentDuration`](#SegmentSettings), and
      // [`segmentDuration`](#SegmentSettings) must be divisible by
      // `gopDuration`.
      google.protobuf.Duration gop_duration = 9;
    }

    // Enforces the specified codec profile. The following profiles are
    // supported:
    //
    // *   `profile0` (default)
    // *   `profile1`
    // *   `profile2`
    // *   `profile3`
    //
    // The available options are
    // [WebM-compatible](https://www.webmproject.org/vp9/profiles/).
    // Note that certain values for this field may cause the
    // transcoder to override other fields you set in the `Vp9CodecSettings`
    // message.
    string profile = 10;
  }

  // Codec settings can be h264, h265, or vp9.
  oneof codec_settings {
    // H264 codec settings.
    H264CodecSettings h264 = 1;

    // H265 codec settings.
    H265CodecSettings h265 = 2;

    // VP9 codec settings.
    Vp9CodecSettings vp9 = 3;
  }
}
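
// Illustrative usage (not part of the API definition): H.264 video stream
// settings in protobuf text format. The 2-second GOP duration is chosen so
// that a 6-second segment duration (see `SegmentSettings`) remains divisible
// by it.
//
//   video_stream {
//     h264 {
//       width_pixels: 1920
//       height_pixels: 1080
//       frame_rate: 60
//       bitrate_bps: 5000000
//       rate_control_mode: "vbr"
//       gop_duration { seconds: 2 }
//       profile: "high"
//     }
//   }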

// Audio stream resource.
message AudioStream {
  // The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`.
  message AudioMapping {
    // Required. The `EditAtom.key` that references the atom with audio inputs
    // in the `Job.edit_list`.
    string atom_key = 1 [(google.api.field_behavior) = REQUIRED];

    // Required. The `Input.key` that identifies the input file.
    string input_key = 2 [(google.api.field_behavior) = REQUIRED];

    // Required. The zero-based index of the track in the input file.
    int32 input_track = 3 [(google.api.field_behavior) = REQUIRED];

    // Required. The zero-based index of the channel in the input audio stream.
    int32 input_channel = 4 [(google.api.field_behavior) = REQUIRED];

    // Required. The zero-based index of the channel in the output audio stream.
    int32 output_channel = 5 [(google.api.field_behavior) = REQUIRED];

    // Audio volume control in dB. Negative values decrease volume,
    // positive values increase. The default is 0.
    double gain_db = 6;
  }

  // The codec for this audio stream. The default is `aac`.
  //
  // Supported audio codecs:
  //
  // - `aac`
  // - `aac-he`
  // - `aac-he-v2`
  // - `mp3`
  // - `ac3`
  // - `eac3`
  string codec = 1;

  // Required. Audio bitrate in bits per second. Must be between 1 and
  // 10,000,000.
  int32 bitrate_bps = 2 [(google.api.field_behavior) = REQUIRED];

  // Number of audio channels. Must be between 1 and 6. The default is 2.
  int32 channel_count = 3;

  // A list of channel names specifying layout of the audio channels.
  // This only affects the metadata embedded in the container headers, if
  // supported by the specified format. The default is `["fl", "fr"]`.
  //
  // Supported channel names:
  //
  // - `fl` - Front left channel
  // - `fr` - Front right channel
  // - `sl` - Side left channel
  // - `sr` - Side right channel
  // - `fc` - Front center channel
  // - `lfe` - Low frequency
  repeated string channel_layout = 4;

  // The mapping for the `Job.edit_list` atoms with audio `EditAtom.inputs`.
  repeated AudioMapping mapping = 5;

  // The audio sample rate in Hertz. The default is 48000 Hertz.
  int32 sample_rate_hertz = 6;

  // The BCP-47 language code, such as `en-US` or `sr-Latn`. For more
  // information, see
  // https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
  // supported in MP4 files.
  string language_code = 7;

  // The name for this particular audio stream that
  // will be added to the HLS/DASH manifest. Not supported in MP4 files.
  string display_name = 8;
}
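
// Illustrative usage (not part of the API definition): a stereo AAC audio
// stream in protobuf text format that maps the first two channels of the
// first audio track of a hypothetical input `input0`, referenced from the
// edit-list atom `atom0`.
//
//   audio_stream {
//     codec: "aac"
//     bitrate_bps: 128000
//     channel_count: 2
//     channel_layout: "fl"
//     channel_layout: "fr"
//     mapping {
//       atom_key: "atom0"
//       input_key: "input0"
//       input_track: 0
//       input_channel: 0
//       output_channel: 0
//     }
//     mapping {
//       atom_key: "atom0"
//       input_key: "input0"
//       input_track: 0
//       input_channel: 1
//       output_channel: 1
//     }
//   }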

// Encoding of a text stream. For example, closed captions or subtitles.
message TextStream {
  // The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`.
  message TextMapping {
    // Required. The `EditAtom.key` that references the atom with text inputs
    // in the `Job.edit_list`.
    string atom_key = 1 [(google.api.field_behavior) = REQUIRED];

    // Required. The `Input.key` that identifies the input file.
    string input_key = 2 [(google.api.field_behavior) = REQUIRED];

    // Required. The zero-based index of the track in the input file.
    int32 input_track = 3 [(google.api.field_behavior) = REQUIRED];
  }

  // The codec for this text stream. The default is `webvtt`.
  //
  // Supported text codecs:
  //
  // - `srt`
  // - `ttml`
  // - `cea608`
  // - `cea708`
  // - `webvtt`
  string codec = 1;

  // The BCP-47 language code, such as `en-US` or `sr-Latn`. For more
  // information, see
  // https://www.unicode.org/reports/tr35/#Unicode_locale_identifier. Not
  // supported in MP4 files.
  string language_code = 2;

  // The mapping for the `Job.edit_list` atoms with text `EditAtom.inputs`.
  repeated TextMapping mapping = 3;

  // The name for this particular text stream that
  // will be added to the HLS/DASH manifest. Not supported in MP4 files.
  string display_name = 4;
}

// Segment settings for `ts`, `fmp4` and `vtt`.
message SegmentSettings {
  // Duration of the segments in seconds. The default is `6.0s`. Note that
  // `segmentDuration` must be greater than or equal to
  // [`gopDuration`](#videostream), and `segmentDuration` must be divisible by
  // [`gopDuration`](#videostream).
  google.protobuf.Duration segment_duration = 1;

  // Required. Create an individual segment file. The default is `false`.
  bool individual_segments = 3 [(google.api.field_behavior) = REQUIRED];
}
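
// Illustrative usage (not part of the API definition): segment settings in
// protobuf text format for 6-second individual segments, which satisfies the
// divisibility requirement against the default 3-second `gopDuration`
// (6s / 3s = 2).
//
//   segment_settings {
//     segment_duration { seconds: 6 }
//     individual_segments: true
//   }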

// Encryption settings.
message Encryption {
  // Configuration for AES-128 encryption.
  message Aes128Encryption {}

  // Configuration for SAMPLE-AES encryption.
  message SampleAesEncryption {}

  // Configuration for MPEG Common Encryption (MPEG-CENC).
  message MpegCommonEncryption {
    // Required. Specify the encryption scheme.
    //
    // Supported encryption schemes:
    //
    // - `cenc`
    // - `cbcs`
    string scheme = 2 [(google.api.field_behavior) = REQUIRED];
  }

  // Configuration for secrets stored in Google Secret Manager.
  message SecretManagerSource {
    // Required. The name of the Secret Version containing the encryption key in
    // the following format:
    // `projects/{project}/secrets/{secret_id}/versions/{version_number}`
    //
    // Note that only numbered versions are supported. Aliases like "latest" are
    // not supported.
    string secret_version = 1 [(google.api.field_behavior) = REQUIRED];
  }

  // Widevine configuration.
  message Widevine {}

  // Fairplay configuration.
  message Fairplay {}

  // Playready configuration.
  message Playready {}

  // Clearkey configuration.
  message Clearkey {}

  // Defines configuration for DRM systems in use.
  message DrmSystems {
    // Widevine configuration.
    Widevine widevine = 1;

    // Fairplay configuration.
    Fairplay fairplay = 2;

    // Playready configuration.
    Playready playready = 3;

    // Clearkey configuration.
    Clearkey clearkey = 4;
  }

  // Required. Identifier for this set of encryption options.
  string id = 6 [(google.api.field_behavior) = REQUIRED];

  // Encryption mode can be either `aes` or `cenc`.
  oneof encryption_mode {
    // Configuration for AES-128 encryption.
    Aes128Encryption aes_128 = 3;

    // Configuration for SAMPLE-AES encryption.
    SampleAesEncryption sample_aes = 4;

    // Configuration for MPEG Common Encryption (MPEG-CENC).
    MpegCommonEncryption mpeg_cenc = 5;
  }

  // Defines where content keys are stored.
  oneof secret_source {
    // Keys are stored in Google Secret Manager.
    SecretManagerSource secret_manager_key_source = 7;
  }

  // Required. DRM system(s) to use; at least one must be specified. If a
  // DRM system is omitted, it is considered disabled.
  DrmSystems drm_systems = 8 [(google.api.field_behavior) = REQUIRED];
}
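
// Illustrative usage (not part of the API definition): an encryption
// configuration in protobuf text format using MPEG-CENC with Widevine, keyed
// from a hypothetical Secret Manager secret version. A `MuxStream` opts in by
// setting `encryption_id: "cenc-widevine"`.
//
//   encryptions {
//     id: "cenc-widevine"
//     mpeg_cenc { scheme: "cenc" }
//     secret_manager_key_source {
//       secret_version: "projects/my-project/secrets/drm-key/versions/1"
//     }
//     drm_systems {
//       widevine {}
//     }
//   }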