// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package pandora;

import "google/protobuf/empty.proto";
import "google/protobuf/wrappers.proto";
import "pandora/host.proto";

option java_outer_classname = "A2DPProto";

// Service to trigger A2DP (Advanced Audio Distribution Profile) procedures.
//
// Requirements for the implementer:
// - Streams must not be automatically opened, even if discovered.
// - The `Host` service must be implemented.
//
// References:
// - [A2DP] Bluetooth SIG, Specification of the Bluetooth System,
//   Advanced Audio Distribution, Version 1.3 or Later
// - [AVDTP] Bluetooth SIG, Specification of the Bluetooth System,
//   Audio/Video Distribution Transport Protocol, Version 1.3 or Later
service A2DP {
  // Open a stream from a local **Source** endpoint to a remote **Sink**
  // endpoint.
  //
  // The returned source should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // A cancellation of this call must result in aborting the current
  // AVDTP procedure (see [AVDTP] 9.9).
  rpc OpenSource(OpenSourceRequest) returns (OpenSourceResponse);
  // Open a stream from a local **Sink** endpoint to a remote **Source**
  // endpoint.
  //
  // The returned sink must be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // A cancellation of this call must result in aborting the current
  // AVDTP procedure (see [AVDTP] 9.9).
  rpc OpenSink(OpenSinkRequest) returns (OpenSinkResponse);
  // Wait for a stream from a local **Source** endpoint to
  // a remote **Sink** endpoint to open.
  //
  // The returned source should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // If the peer has opened a source prior to this call, the server will
  // return it. The server must return the same source only once.
  rpc WaitSource(WaitSourceRequest) returns (WaitSourceResponse);
  // Wait for a stream from a local **Sink** endpoint to
  // a remote **Source** endpoint to open.
  //
  // The returned sink should be in the AVDTP_OPEN state (see [AVDTP] 9.1).
  // The rpc must block until the stream has reached this state.
  //
  // If the peer has opened a sink prior to this call, the server will
  // return it. The server must return the same sink only once.
  rpc WaitSink(WaitSinkRequest) returns (WaitSinkResponse);
  // Get whether the stream is suspended.
  //
  // NOTE(review): best practice is a dedicated `IsSuspendedResponse` message
  // rather than a wrapper type, so fields can be added later; changing it now
  // would break the published RPC signature, so it is kept as-is.
  rpc IsSuspended(IsSuspendedRequest) returns (google.protobuf.BoolValue);
  // Start an opened stream.
  //
  // The rpc must block until the stream has reached the
  // AVDTP_STREAMING state (see [AVDTP] 9.1).
  rpc Start(StartRequest) returns (StartResponse);
  // Suspend a streaming stream.
  //
  // The rpc must block until the stream has reached the AVDTP_OPEN
  // state (see [AVDTP] 9.1).
  rpc Suspend(SuspendRequest) returns (SuspendResponse);
  // Close a stream, the source or sink tokens must not be reused afterwards.
  rpc Close(CloseRequest) returns (CloseResponse);
  // Get the `AudioEncoding` value of a stream.
  rpc GetAudioEncoding(GetAudioEncodingRequest)
      returns (GetAudioEncodingResponse);
  // Playback audio by a `Source`.
  rpc PlaybackAudio(stream PlaybackAudioRequest)
      returns (PlaybackAudioResponse);
  // Capture audio from a `Sink`.
  rpc CaptureAudio(CaptureAudioRequest) returns (stream CaptureAudioResponse);
  // Get codec configuration.
  rpc GetConfiguration(GetConfigurationRequest)
      returns (GetConfigurationResponse);
  // Set codec configuration.
  rpc SetConfiguration(SetConfigurationRequest)
      returns (SetConfigurationResponse);
}

// Audio encoding formats.
//
// NOTE(review): values are not prefixed with the enum name
// (e.g. AUDIO_ENCODING_PCM_...); renaming them would break generated code on
// this published API, so they are kept as-is.
enum AudioEncoding {
  // Interleaved stereo frames with 16-bit signed little-endian linear PCM
  // samples at 44100Hz sample rate
  PCM_S16_LE_44K1_STEREO = 0;
  // Interleaved stereo frames with 16-bit signed little-endian linear PCM
  // samples at 48000Hz sample rate
  PCM_S16_LE_48K_STEREO = 1;
}

// Channel mode.
//
// NOTE(review): values are not prefixed with the enum name
// (e.g. CHANNEL_MODE_MONO), so they risk colliding with sibling enums in this
// package; renaming them would break generated code, so they are kept as-is.
enum ChannelMode {
  UNKNOWN = 0;
  MONO = 1;
  STEREO = 2;
  DUALMONO = 3;
}

// A Token representing a Source stream (see [A2DP] 2.2).
// It's acquired via an OpenSource on the A2DP service.
message Source {
  // Opaque value filled by the GRPC server, must not
  // be modified nor crafted.
  bytes cookie = 1;
}

// A Token representing a Sink stream (see [A2DP] 2.2).
// It's acquired via an OpenSink on the A2DP service.
message Sink {
  // Opaque value filled by the GRPC server, must not
  // be modified nor crafted.
  bytes cookie = 1;
}

// Vendor codec.
message Vendor {
  // 16 bits - Vendor identifier, assigned by BT Sig [Assigned Numbers - 7.1]
  uint32 id = 1;
  // 16 bits - Assigned by the vendor.
  // NOTE(review): should be lower_snake_case (`codec_id`); renaming is
  // wire-safe but would change generated accessors and the JSON key on this
  // published API, so it is kept as-is.
  uint32 codecId = 2;
}

// Codec identifier defined for A2DP
message CodecId {
  oneof type {
    google.protobuf.Empty sbc = 1;
    google.protobuf.Empty mpeg_aac = 2;
    Vendor vendor = 3;
  }
}

message CodecParameters {
  // Channel mode: Mono, Dual-Mono or Stereo
  ChannelMode channel_mode = 1;
  // Sampling frequency in Hz.
  uint32 sampling_frequency_hz = 2;
  // Fixed point resolution in bits per sample.
  uint32 bit_depth = 3;
  // Bitrate limits on a frame basis, defined in bits per second.
  // The 0 value for both means "undefined" or "don't care".
  uint32 min_bitrate = 4;
  uint32 max_bitrate = 5;
  // Low-latency configuration. The interpretation is vendor specific.
  bool low_latency = 6;
  // Lossless effort indication. The 'False' value can be used as "don't care".
  bool lossless = 7;
  // Vendor specific parameters.
  bytes vendor_specific_parameters = 8;
}

message Configuration {
  // Codec identifier.
  CodecId id = 1;
  // Codec parameters.
  CodecParameters parameters = 2;
}

// Request for the `OpenSource` method.
message OpenSourceRequest {
  // The connection that will open the stream.
  Connection connection = 1;
}

// Response for the `OpenSource` method.
message OpenSourceResponse {
  // Result of the `OpenSource` call.
  oneof result {
    // Opened stream.
    Source source = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `OpenSink` method.
message OpenSinkRequest {
  // The connection that will open the stream.
  Connection connection = 1;
}

// Response for the `OpenSink` method.
message OpenSinkResponse {
  // Result of the `OpenSink` call.
  oneof result {
    // Opened stream.
    Sink sink = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `WaitSource` method.
message WaitSourceRequest {
  // The connection that is awaiting the stream.
  Connection connection = 1;
}

// Response for the `WaitSource` method.
message WaitSourceResponse {
  // Result of the `WaitSource` call.
  oneof result {
    // Awaited stream.
    Source source = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `WaitSink` method.
message WaitSinkRequest {
  // The connection that is awaiting the stream.
  Connection connection = 1;
}

// Response for the `WaitSink` method.
message WaitSinkResponse {
  // Result of the `WaitSink` call.
  oneof result {
    // Awaited stream.
    Sink sink = 1;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 2;
  }
}

// Request for the `IsSuspended` method.
message IsSuspendedRequest {
  // The stream on which the function will check if it's suspended
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Request for the `Start` method.
message StartRequest {
  // Target of the start, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Start` method.
message StartResponse {
  // Result of the `Start` call.
  oneof result {
    // Stream successfully started.
    google.protobuf.Empty started = 1;
    // Stream is already in AVDTP_STREAMING state.
    google.protobuf.Empty already_started = 2;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 3;
  }
}

// Request for the `Suspend` method.
message SuspendRequest {
  // Target of the suspend, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Suspend` method.
message SuspendResponse {
  // Result of the `Suspend` call.
  oneof result {
    // Stream successfully suspended.
    google.protobuf.Empty suspended = 1;
    // Stream is already in AVDTP_OPEN state.
    google.protobuf.Empty already_suspended = 2;
    // The Connection disconnected.
    google.protobuf.Empty disconnected = 3;
  }
}

// Request for the `Close` method.
message CloseRequest {
  // Target of the close, either a Sink or a Source.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `Close` method.
message CloseResponse {}

// Request for the `GetAudioEncoding` method.
message GetAudioEncodingRequest {
  // The stream on which the function will read the `AudioEncoding`.
  oneof target {
    Sink sink = 1;
    Source source = 2;
  }
}

// Response for the `GetAudioEncoding` method.
message GetAudioEncodingResponse {
  // Audio encoding of the stream.
  AudioEncoding encoding = 1;
}

// Request for the `PlaybackAudio` method.
message PlaybackAudioRequest {
  // Source that will playback audio.
  Source source = 1;
  // Audio data to playback.
  // The audio data must be encoded in the specified `AudioEncoding` value
  // obtained in response of a `GetAudioEncoding` method call.
  bytes data = 2;
}

// Response for the `PlaybackAudio` method.
message PlaybackAudioResponse {}

// Request for the `CaptureAudio` method.
message CaptureAudioRequest {
  // Sink that will capture audio
  Sink sink = 1;
}

// Response for the `CaptureAudio` method.
message CaptureAudioResponse {
  // Captured audio data.
  // The audio data is encoded in the specified `AudioEncoding` value
  // obtained in response of a `GetAudioEncoding` method call.
  bytes data = 1;
}

// Request for the `GetConfiguration` method.
message GetConfigurationRequest {
  // The connection to get codec configuration from.
  Connection connection = 1;
}

// Response for the `GetConfiguration` method.
message GetConfigurationResponse {
  // Codec configuration.
  Configuration configuration = 1;
}

// Request for the `SetConfiguration` method.
message SetConfigurationRequest {
  // The connection to set codec configuration.
  Connection connection = 1;
  // New codec configuration.
  Configuration configuration = 2;
}

// Response for the `SetConfiguration` method.
message SetConfigurationResponse {
  // Set configuration result
  bool success = 1;
}