// xref: /aosp_15_r20/external/openscreen/cast/cast_core/api/runtime/cast_audio_channel_service.proto (revision 3f982cf4871df8771c9d4abe6e9a6f8d829b2736)
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// **** DO NOT EDIT - this file was automatically generated. ****
syntax = "proto3";

package cast.media;

import "google/protobuf/duration.proto";

option optimize_for = LITE_RUNTIME;

// Cast audio service hosted by Cast Core.
//
// It defines a state machine with the following states:
// - Uninitialized
// - Playing
// - Stopped
// - Paused
//
// Note that the received ordering between different RPC calls is not
// guaranteed to match the sent order.
service CastAudioChannelService {
  // Initializes the service and places the pipeline into the 'Stopped' state.
  // This must be the first call received by the server, and no other calls
  // may be sent prior to receiving this call's response.
  rpc Initialize(InitializeRequest) returns (InitializeResponse);

  // Returns the minimum buffering delay (min_delay) required by Cast.  This is
  // a constant value and only needs to be queried once for each service.
  // During a StartRequest or ResumeRequest, the system timestamp must be
  // greater than this delay and the current time in order for the buffer to be
  // successfully rendered on remote devices.
  rpc GetMinimumBufferDelay(GetMinimumBufferDelayRequest)
      returns (GetMinimumBufferDelayResponse);

  // Update the pipeline state.
  //
  // StartRequest:
  //   Places pipeline into 'Playing' state. Playback will start at the
  //   specified buffer and system timestamp.
  //
  //   May only be called in the 'Stopped' state, and following this call the
  //   state machine will be in the 'Playing' state.
  //
  // StopRequest:
  //   Stops media playback and drops all pushed buffers which have not yet been
  //   played.
  //
  //   May only be called in the 'Playing' or 'Paused' states, and following
  //   this call the state machine will be in the 'Stopped' state.
  //
  // PauseRequest:
  //   Pauses media playback.
  //
  //   May only be called in the 'Playing' state, and following this call the
  //   state machine will be in the 'Paused' state.
  //
  // ResumeRequest:
  //   Resumes media playback at the specified buffer and system timestamp.
  //
  //   May only be called in the 'Paused' state, and following this call the
  //   state machine will be in the 'Playing' state.
  //
  // TimestampUpdateRequest:
  //   Sends a timestamp update for a specified buffer for audio
  //   synchronization. This should be called when operating in
  //   CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY when the runtime has detected a
  //   discrepancy in the system clock or pipeline delay from the original
  //   playback schedule.  See example below:
  //
  //   Assume all buffers have duration of 100us.
  //
  //   StartRequest(id=1, system_timestamp=0);
  //   -> Cast expects id=1 to play at 0, id=2 at 100us, id=3 at 200 us...
  //
  //   TimestampUpdateRequest(id=4, system_timestamp=405us);
  //   -> Cast expects id=4 to play at 405, id=5 at 505us, id=6 at 605 us...
  //
  //   May be called from any state.
  //
  // A state transition may only occur after a successful PushBuffer()
  // call has been made with a valid configuration.
  rpc StateChange(StateChangeRequest) returns (StateChangeResponse);

  // Sets the volume multiplier for this audio stream.
  // The multiplier is in the range [0.0, 1.0].  If not called, a default
  // multiplier of 1.0 is assumed.
  //
  // May be called in any state, and following this call the state machine
  // will be in the same state.
  rpc SetVolume(SetVolumeRequest) returns (SetVolumeResponse);

  // Sets the playback rate for this audio stream.
  //
  // May be called in any state, and following this call the state machine
  // will be in the same state.
  rpc SetPlaybackRate(SetPlaybackRateRequest) returns (SetPlaybackRateResponse);

  // Sends decoded bits and responses to the audio service. The client must
  // wait for a response from the server before sending another
  // PushBufferRequest.
  //
  // May only be called in the 'Playing' or 'Paused' states, and following
  // this call the state machine will remain in the same state.
  rpc PushBuffer(PushBufferRequest) returns (PushBufferResponse);

  // Returns the current media time that has been rendered.
  rpc GetMediaTime(GetMediaTimeRequest) returns (GetMediaTimeResponse);
}

message InitializeRequest {
  // Cast session ID.
  string cast_session_id = 1;

  // Configures how the server should operate.
  CastAudioDecoderMode mode = 2;
}

message InitializeResponse {}

message GetMinimumBufferDelayRequest {}

message GetMinimumBufferDelayResponse {
  // The minimum buffering delay in microseconds.
  int64 delay_micros = 1;
}

message StateChangeRequest {
  // Exactly one state-transition request must be set per call.
  oneof request {
    StartRequest start = 1;
    StopRequest stop = 2;
    PauseRequest pause = 3;
    ResumeRequest resume = 4;
    TimestampUpdateRequest timestamp_update = 5;
  }
}

message StateChangeResponse {
  // Pipeline state after state change.
  PipelineState state = 1;
}

message SetVolumeRequest {
  // The multiplier is in the range [0.0, 1.0].
  float multiplier = 1;
}

message SetVolumeResponse {}

message SetPlaybackRateRequest {
  // Playback rate greater than 0.
  double rate = 1;
}

message SetPlaybackRateResponse {}

message PushBufferRequest {
  // The decoded audio frame being pushed.
  AudioDecoderBuffer buffer = 1;

  // Audio configuration for this buffer and all subsequent buffers. This
  // field must be populated for the first request or if there is an audio
  // configuration change.
  AudioConfiguration audio_config = 2;
}

message PushBufferResponse {
  // The total number of decoded bytes.
  int64 decoded_bytes = 1;
}

message GetMediaTimeRequest {}

message GetMediaTimeResponse {
  // The current media time that has been rendered.
  MediaTime media_time = 1;
}

// States of the audio pipeline state machine (see CastAudioChannelService).
enum PipelineState {
  PIPELINE_STATE_UNINITIALIZED = 0;
  PIPELINE_STATE_STOPPED = 1;
  PIPELINE_STATE_PLAYING = 2;
  PIPELINE_STATE_PAUSED = 3;
}

enum CastAudioDecoderMode {
  // Both multiroom and audio rendering is enabled.
  CAST_AUDIO_DECODER_MODE_ALL = 0;

  // Only multiroom is enabled and audio rendering is disabled.  This should
  // be used if the runtime is taking over responsibility for rendering audio.
  CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY = 1;

  // Only audio rendering is enabled and multiroom is disabled.
  CAST_AUDIO_DECODER_MODE_AUDIO_ONLY = 2;
}

// Describes the format of the audio data carried by PushBufferRequest.
message AudioConfiguration {
  enum AudioCodec {
    AUDIO_CODEC_UNKNOWN = 0;
    AUDIO_CODEC_AAC = 1;
    AUDIO_CODEC_MP3 = 2;
    AUDIO_CODEC_PCM = 3;
    AUDIO_CODEC_PCM_S16BE = 4;
    AUDIO_CODEC_VORBIS = 5;
    AUDIO_CODEC_OPUS = 6;
    AUDIO_CODEC_EAC3 = 7;
    AUDIO_CODEC_AC3 = 8;
    AUDIO_CODEC_DTS = 9;
    AUDIO_CODEC_FLAC = 10;
    AUDIO_CODEC_MPEG_H_AUDIO = 11;
  }

  enum ChannelLayout {
    CHANNEL_LAYOUT_UNSUPPORTED = 0;

    // Front C
    CHANNEL_LAYOUT_MONO = 1;

    // Front L, Front R
    CHANNEL_LAYOUT_STEREO = 2;

    // Front L, Front R, Front C, LFE, Side L, Side R
    CHANNEL_LAYOUT_SURROUND_5_1 = 3;

    // Actual channel layout is specified in the bitstream and the actual
    // channel count is unknown at Chromium media pipeline level (useful for
    // audio pass-through mode).
    CHANNEL_LAYOUT_BITSTREAM = 4;

    // Channels are not explicitly mapped to speakers.
    CHANNEL_LAYOUT_DISCRETE = 5;
  }

  enum SampleFormat {
    SAMPLE_FORMAT_UNKNOWN = 0;
    SAMPLE_FORMAT_U8 = 1;          // Unsigned 8-bit w/ bias of 128.
    SAMPLE_FORMAT_S16 = 2;         // Signed 16-bit.
    SAMPLE_FORMAT_S32 = 3;         // Signed 32-bit.
    SAMPLE_FORMAT_F32 = 4;         // Float 32-bit.
    SAMPLE_FORMAT_PLANAR_S16 = 5;  // Signed 16-bit planar.
    SAMPLE_FORMAT_PLANAR_F32 = 6;  // Float 32-bit planar.
    SAMPLE_FORMAT_PLANAR_S32 = 7;  // Signed 32-bit planar.
    SAMPLE_FORMAT_S24 = 8;         // Signed 24-bit.
  }

  // Audio codec.
  AudioCodec codec = 1;

  // Audio channel layout.
  ChannelLayout channel_layout = 2;

  // The format of each audio sample.
  SampleFormat sample_format = 3;

  // Number of bytes in each channel.
  int64 bytes_per_channel = 4;

  // Number of channels in this audio stream.
  int32 channel_number = 5;

  // Number of audio samples per second.
  int64 samples_per_second = 6;

  // Extra data buffer for certain codec initialization.
  bytes extra_data = 7;
}

// The data buffer associated with a single frame of audio data.
message AudioDecoderBuffer {
  // The PTS of the frame in microseconds. This is a property of the audio frame
  // and is used by the receiver to correctly order the audio frames and to
  // determine when they should be decoded.
  int64 pts_micros = 1;

  // A single frame of audio data as a byte array.
  bytes data = 2;

  // Indicates if this is a special frame that indicates the end of the stream.
  // If true, functions to access the frame content cannot be called.
  bool end_of_stream = 3;

  // Unique identifier.  This field should be greater than or equal to 0 and
  // incremented by one for each PushBufferRequest.
  int64 id = 4;
}

message MediaTime {
  // The current PTS that has been rendered.
  int64 current_pts_micros = 1;

  // The end of stream has been rendered.
  bool end_of_stream = 2;

  // Capture time with respect to CLOCK_MONOTONIC_RAW at which the delay
  // measurement was taken.
  google.protobuf.Duration capture_time = 3;
}

message TimestampInfo {
  // System timestamp with respect to CLOCK_MONOTONIC_RAW at which the
  // corresponding buffer is expected to be rendered.
  google.protobuf.Duration system_timestamp = 1;

  // AudioDecoderBuffer.id associated with the |system_timestamp|.
  int64 buffer_id = 2;
}

message StartRequest {
  // The start presentation timestamp in microseconds.
  int64 pts_micros = 1;

  // Timestamp information associated with the request.
  // This field is optional and only used when this service is configured
  // for CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY.
  TimestampInfo timestamp_info = 2;
}

message StopRequest {}

message PauseRequest {}

message ResumeRequest {
  // Timestamp information associated with the request.
  // This field is optional and only used when this service is configured
  // for CAST_AUDIO_DECODER_MODE_MULTIROOM_ONLY.
  TimestampInfo resume_timestamp_info = 1;
}

message TimestampUpdateRequest {
  // New rendering schedule for the referenced buffer and all that follow it.
  TimestampInfo timestamp_info = 1;
}
335