/*
 * Copyright (C) 2023 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#define LOG_TAG "AHAL_StreamRemoteSubmix"
#include <android-base/logging.h>
#include <audio_utils/clock.h>
#include <error/Result.h>
#include <error/expected_utils.h>

#include "core-impl/StreamRemoteSubmix.h"

using aidl::android::hardware::audio::common::SinkMetadata;
using aidl::android::hardware::audio::common::SourceMetadata;
using aidl::android::hardware::audio::core::r_submix::SubmixRoute;
using aidl::android::media::audio::common::AudioDeviceAddress;
using aidl::android::media::audio::common::AudioOffloadInfo;
using aidl::android::media::audio::common::MicrophoneDynamicInfo;
using aidl::android::media::audio::common::MicrophoneInfo;

namespace aidl::android::hardware::audio::core {

using deprecated::InnerStreamWrapper;
using deprecated::StreamCommonInterfaceEx;
using deprecated::StreamSwitcher;

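// Capture the stream configuration (frame size, format, channel layout, sample rate); it is
// used to find or create the shared submix route and for frame/byte conversions in the read
// and write paths below.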
StreamRemoteSubmix::StreamRemoteSubmix(StreamContext* context, const Metadata& metadata,
                                       const AudioDeviceAddress& deviceAddress)
    : StreamCommonImpl(context, metadata),
      mDeviceAddress(deviceAddress),
      mIsInput(isInput(metadata)) {
    mStreamConfig.frameSize = context->getFrameSize();
    mStreamConfig.format = context->getFormat();
    mStreamConfig.channelLayout = context->getChannelLayout();
    mStreamConfig.sampleRate = context->getSampleRate();
}

StreamRemoteSubmix::~StreamRemoteSubmix() {
    cleanupWorker();
}

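// Find or create the route (pipe) for this device address, validate the stream configuration
// against it, reset a sink that has already been shut down, and register this stream with the
// route.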
::android::status_t StreamRemoteSubmix::init() {
    mCurrentRoute = SubmixRoute::findOrCreateRoute(mDeviceAddress, mStreamConfig);
    if (mCurrentRoute == nullptr) {
        return ::android::NO_INIT;
    }
    if (!mCurrentRoute->isStreamConfigValid(mIsInput, mStreamConfig)) {
        LOG(ERROR) << __func__ << ": invalid stream config";
        return ::android::NO_INIT;
    }
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink == nullptr) {
        LOG(ERROR) << __func__ << ": nullptr sink when opening stream";
        return ::android::NO_INIT;
    }
    if ((!mIsInput || mCurrentRoute->isStreamInOpen()) && sink->isShutdown()) {
        LOG(DEBUG) << __func__ << ": Shut down sink when opening stream";
        if (::android::OK != mCurrentRoute->resetPipe()) {
            LOG(ERROR) << __func__ << ": reset pipe failed";
            return ::android::NO_INIT;
        }
    }
    mCurrentRoute->openStream(mIsInput);
    return ::android::OK;
}

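// drain, flush, and pause do not interact with the pipe; they just yield briefly before
// reporting success.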
::android::status_t StreamRemoteSubmix::drain(StreamDescriptor::DrainMode) {
    usleep(1000);
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::flush() {
    usleep(1000);
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::pause() {
    usleep(1000);
    return ::android::OK;
}

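// standby releases the route's resources for this direction; start re-arms the route and
// records the start time and frame counter used for pacing in 'transfer'.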
::android::status_t StreamRemoteSubmix::standby() {
    mCurrentRoute->standby(mIsInput);
    return ::android::OK;
}

::android::status_t StreamRemoteSubmix::start() {
    mCurrentRoute->exitStandby(mIsInput);
    mStartTimeNs = ::android::uptimeNanos();
    mFramesSinceStart = 0;
    return ::android::OK;
}

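// For output streams, shut down the MonoPipe sink and release the output end of the route;
// the client already considers the stream closed. Input streams have nothing to prepare.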
ndk::ScopedAStatus StreamRemoteSubmix::prepareToClose() {
    if (!mIsInput) {
        std::shared_ptr<SubmixRoute> route = SubmixRoute::findRoute(mDeviceAddress);
        if (route != nullptr) {
            sp<MonoPipe> sink = route->getSink();
            if (sink == nullptr) {
                return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
            }
            LOG(DEBUG) << __func__ << ": shutting down MonoPipe sink";

            sink->shutdown(true);
            // The client already considers this stream closed; release the output end.
            route->closeStream(mIsInput);
        } else {
            LOG(DEBUG) << __func__ << ": stream already closed.";
            return ndk::ScopedAStatus::fromExceptionCode(EX_ILLEGAL_STATE);
        }
    }
    return ndk::ScopedAStatus::ok();
}

// Remove references to the specified input and output streams. When the device no longer
// references input or output streams, destroy the associated pipe.
void StreamRemoteSubmix::shutdown() {
    mCurrentRoute->closeStream(mIsInput);
    // If all stream instances are closed, we can remove route information for this port.
    if (!mCurrentRoute->hasAtleastOneStreamOpen()) {
        mCurrentRoute->releasePipe();
        LOG(DEBUG) << __func__ << ": pipe destroyed";
        SubmixRoute::removeRoute(mDeviceAddress);
    }
    mCurrentRoute.reset();
}

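// Exit standby, perform the read or write, and then pace the caller: input streams (and
// output streams whose sink is gone) sleep so that data flows at the nominal sample rate
// relative to the stream start time.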
::android::status_t StreamRemoteSubmix::transfer(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount, int32_t* latencyMs) {
    *latencyMs = getDelayInUsForFrameCount(getStreamPipeSizeInFrames()) / 1000;
    LOG(VERBOSE) << __func__ << ": Latency " << *latencyMs << "ms";
    mCurrentRoute->exitStandby(mIsInput);
    ::android::status_t status = mIsInput ? inRead(buffer, frameCount, actualFrameCount)
                                          : outWrite(buffer, frameCount, actualFrameCount);
    if ((status != ::android::OK && mIsInput) ||
        ((status != ::android::OK && status != ::android::DEAD_OBJECT) && !mIsInput)) {
        return status;
    }
    mFramesSinceStart += *actualFrameCount;
    if (!mIsInput && status != ::android::DEAD_OBJECT) return ::android::OK;
    // Input streams always need to block; output streams need to block when there is no sink.
    // When the sink exists, a more sophisticated blocking algorithm is implemented by MonoPipe.
    const long bufferDurationUs =
            (*actualFrameCount) * MICROS_PER_SECOND / mContext.getSampleRate();
    const auto totalDurationUs = (::android::uptimeNanos() - mStartTimeNs) / NANOS_PER_MICROSECOND;
    const long totalOffsetUs =
            mFramesSinceStart * MICROS_PER_SECOND / mContext.getSampleRate() - totalDurationUs;
    LOG(VERBOSE) << __func__ << ": totalOffsetUs " << totalOffsetUs;
    if (totalOffsetUs > 0) {
        const long sleepTimeUs = std::min(totalOffsetUs, bufferDurationUs);
        LOG(VERBOSE) << __func__ << ": sleeping for " << sleepTimeUs << " us";
        usleep(sleepTimeUs);
    }
    return ::android::OK;
}

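// Adjust the observed position by the number of frames still buffered in the pipe: an input
// stream can deliver them immediately, while an output stream has written them but they have
// not been consumed yet.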
::android::status_t StreamRemoteSubmix::refinePosition(StreamDescriptor::Position* position) {
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    if (source == nullptr) {
        return ::android::NO_INIT;
    }
    const ssize_t framesInPipe = source->availableToRead();
    if (framesInPipe <= 0) {
        // No need to update the position frames
        return ::android::OK;
    }
    if (mIsInput) {
        position->frames += framesInPipe;
    } else if (position->frames >= framesInPipe) {
        position->frames -= framesInPipe;
    }
    return ::android::OK;
}

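// Convert a frame count into a duration in microseconds at the stream's sample rate.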
long StreamRemoteSubmix::getDelayInUsForFrameCount(size_t frameCount) {
    return frameCount * MICROS_PER_SECOND / mStreamConfig.sampleRate;
}

// Calculate the maximum size of the pipe buffer in frames for the specified stream.
size_t StreamRemoteSubmix::getStreamPipeSizeInFrames() {
    auto pipeConfig = mCurrentRoute->getPipeConfig();
    const size_t maxFrameSize = std::max(mStreamConfig.frameSize, pipeConfig.frameSize);
    return (pipeConfig.frameCount * pipeConfig.frameSize) / maxFrameSize;
}

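// Write 'frameCount' frames into the MonoPipe sink. If the sink has been shut down the write
// is skipped and DEAD_OBJECT is returned so that 'transfer' paces the caller. When blocking is
// not desired, stale frames are flushed from the pipe (or the request is truncated) to avoid
// blocking on a full pipe.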
::android::status_t StreamRemoteSubmix::outWrite(void* buffer, size_t frameCount,
                                                 size_t* actualFrameCount) {
    sp<MonoPipe> sink = mCurrentRoute->getSink();
    if (sink != nullptr) {
        if (sink->isShutdown()) {
            sink.clear();
            if (++mWriteShutdownCount < kMaxErrorLogs) {
                LOG(DEBUG) << __func__ << ": pipe shutdown, ignoring the write. (limited logging)";
            }
            *actualFrameCount = frameCount;
            return ::android::DEAD_OBJECT;  // Induce wait in `transfer`.
        }
    } else {
        LOG(FATAL) << __func__ << ": without a pipe!";
        return ::android::UNKNOWN_ERROR;
    }
    mWriteShutdownCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";

    const bool shouldBlockWrite = mCurrentRoute->shouldBlockWrite();
    size_t availableToWrite = sink->availableToWrite();
    // NOTE: sink has been checked above and sink and source life cycles are synchronized
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    // If the write to the sink should not block, flush enough frames from the pipe to make
    // space to write the most recent data.
    if (!shouldBlockWrite && availableToWrite < frameCount) {
        static uint8_t flushBuffer[64];
        const size_t flushBufferSizeFrames = sizeof(flushBuffer) / mStreamConfig.frameSize;
        size_t framesToFlushFromSource = frameCount - availableToWrite;
        LOG(DEBUG) << __func__ << ": flushing " << framesToFlushFromSource
                   << " frames from the pipe to avoid blocking";
        while (framesToFlushFromSource) {
            const size_t flushSize = std::min(framesToFlushFromSource, flushBufferSizeFrames);
            framesToFlushFromSource -= flushSize;
            // read does not block
            source->read(flushBuffer, flushSize);
        }
    }
    availableToWrite = sink->availableToWrite();

    if (!shouldBlockWrite && frameCount > availableToWrite) {
        LOG(WARNING) << __func__ << ": writing " << availableToWrite << " vs. requested "
                     << frameCount;
        // Truncate the request to avoid blocking.
        frameCount = availableToWrite;
    }
    ssize_t writtenFrames = sink->write(buffer, frameCount);
    if (writtenFrames < 0) {
        if (writtenFrames == (ssize_t)::android::NEGOTIATE) {
            LOG(ERROR) << __func__ << ": write to pipe returned NEGOTIATE";
            sink.clear();
            *actualFrameCount = 0;
            return ::android::UNKNOWN_ERROR;
        } else {
            // write() returned UNDERRUN or WOULD_BLOCK, retry
            LOG(ERROR) << __func__ << ": write to pipe returned unexpected " << writtenFrames;
            writtenFrames = sink->write(buffer, frameCount);
        }
    }

    if (writtenFrames < 0) {
        LOG(ERROR) << __func__ << ": failed writing to pipe with " << writtenFrames;
        *actualFrameCount = 0;
        return ::android::UNKNOWN_ERROR;
    }
    if (writtenFrames > 0 && frameCount > (size_t)writtenFrames) {
        LOG(WARNING) << __func__ << ": wrote " << writtenFrames << " vs. requested " << frameCount;
    }
    *actualFrameCount = writtenFrames;
    return ::android::OK;
}

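// Read up to 'frameCount' frames from the MonoPipe source into 'buffer'. The buffer is
// pre-filled with silence so that a full buffer is always reported; reads are retried until
// either enough frames arrive or a deadline derived from the buffer duration expires.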
::android::status_t StreamRemoteSubmix::inRead(void* buffer, size_t frameCount,
                                               size_t* actualFrameCount) {
    // In any case, emulate that data for the entire buffer was available.
    memset(buffer, 0, mStreamConfig.frameSize * frameCount);
    *actualFrameCount = frameCount;

    // about to read from audio source
    sp<MonoPipeReader> source = mCurrentRoute->getSource();
    if (source == nullptr) {
        if (++mReadErrorCount < kMaxErrorLogs) {
            LOG(ERROR) << __func__
                       << ": no audio pipe yet we're trying to read! (not all errors will be "
                          "logged)";
        }
        return ::android::OK;
    }
    mReadErrorCount = 0;

    LOG(VERBOSE) << __func__ << ": " << mDeviceAddress.toString() << ", " << frameCount
                 << " frames";
    // read the data from the pipe
    char* buff = (char*)buffer;
    size_t actuallyRead = 0;
    long remainingFrames = frameCount;
    // Try to wait as long as possible for the audio duration, but leave some time for the call to
    // 'transfer' to complete. 'kReadAttemptSleepUs' is a good constant for this purpose because it
    // is by definition strictly less than the typical buffer duration.
    const long durationUs =
            std::max(0L, getDelayInUsForFrameCount(frameCount) - kReadAttemptSleepUs);
    const int64_t deadlineTimeNs = ::android::uptimeNanos() + durationUs * NANOS_PER_MICROSECOND;
    while (remainingFrames > 0) {
        ssize_t framesRead = source->read(buff, remainingFrames);
        LOG(VERBOSE) << __func__ << ": frames read " << framesRead;
        if (framesRead > 0) {
            remainingFrames -= framesRead;
            buff += framesRead * mStreamConfig.frameSize;
            LOG(VERBOSE) << __func__ << ": got " << framesRead
                         << " frames, remaining = " << remainingFrames;
            actuallyRead += framesRead;
        }
        if (::android::uptimeNanos() >= deadlineTimeNs) break;
        if (framesRead <= 0) {
            LOG(VERBOSE) << __func__ << ": read returned " << framesRead
                         << ", read failure, sleeping for " << kReadAttemptSleepUs << " us";
            usleep(kReadAttemptSleepUs);
        }
    }
    if (actuallyRead < frameCount) {
        if (++mReadFailureCount < kMaxReadFailureAttempts) {
            LOG(WARNING) << __func__ << ": read " << actuallyRead << " vs. requested " << frameCount
                         << " (not all errors will be logged)";
        }
    } else {
        mReadFailureCount = 0;
    }
    mCurrentRoute->updateReadCounterFrames(*actualFrameCount);
    return ::android::OK;
}

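// The stream classes below wrap StreamRemoteSubmix in a StreamSwitcher: a stub stream is
// served until a remote submix device is connected, at which point the actual stream bound to
// that device address is created (see 'switchCurrentStream' and 'createNewStream').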
StreamInRemoteSubmix::StreamInRemoteSubmix(StreamContext&& context,
                                           const SinkMetadata& sinkMetadata,
                                           const std::vector<MicrophoneInfo>& microphones)
    : StreamIn(std::move(context), microphones), StreamSwitcher(&mContextInstance, sinkMetadata) {}

ndk::ScopedAStatus StreamInRemoteSubmix::getActiveMicrophones(
        std::vector<MicrophoneDynamicInfo>* _aidl_return) {
    LOG(DEBUG) << __func__ << ": not supported";
    *_aidl_return = std::vector<MicrophoneDynamicInfo>();
    return ndk::ScopedAStatus::ok();
}

StreamSwitcher::DeviceSwitchBehavior StreamInRemoteSubmix::switchCurrentStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
    // This implementation effectively postpones stream creation until
    // receiving the first call to 'setConnectedDevices' with a non-empty list.
    if (isStubStream()) {
        if (devices.size() == 1) {
            auto deviceDesc = devices.front().type;
            if (deviceDesc.type ==
                ::aidl::android::media::audio::common::AudioDeviceType::IN_SUBMIX) {
                return DeviceSwitchBehavior::CREATE_NEW_STREAM;
            }
            LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
                       << " not supported";
        } else {
            LOG(ERROR) << __func__ << ": Only single device supported.";
        }
        return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
    }
    return DeviceSwitchBehavior::USE_CURRENT_STREAM;
}

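// Create the actual remote submix stream, bound to the address of the device that has just
// been connected.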
std::unique_ptr<StreamCommonInterfaceEx> StreamInRemoteSubmix::createNewStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
        StreamContext* context, const Metadata& metadata) {
    return std::unique_ptr<StreamCommonInterfaceEx>(
            new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
}

StreamOutRemoteSubmix::StreamOutRemoteSubmix(StreamContext&& context,
                                             const SourceMetadata& sourceMetadata,
                                             const std::optional<AudioOffloadInfo>& offloadInfo)
    : StreamOut(std::move(context), offloadInfo),
      StreamSwitcher(&mContextInstance, sourceMetadata) {}

StreamSwitcher::DeviceSwitchBehavior StreamOutRemoteSubmix::switchCurrentStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices) {
    // This implementation effectively postpones stream creation until
    // receiving the first call to 'setConnectedDevices' with a non-empty list.
    if (isStubStream()) {
        if (devices.size() == 1) {
            auto deviceDesc = devices.front().type;
            if (deviceDesc.type ==
                ::aidl::android::media::audio::common::AudioDeviceType::OUT_SUBMIX) {
                return DeviceSwitchBehavior::CREATE_NEW_STREAM;
            }
            LOG(ERROR) << __func__ << ": Device type " << toString(deviceDesc.type)
                       << " not supported";
        } else {
            LOG(ERROR) << __func__ << ": Only single device supported.";
        }
        return DeviceSwitchBehavior::UNSUPPORTED_DEVICES;
    }
    return DeviceSwitchBehavior::USE_CURRENT_STREAM;
}

std::unique_ptr<StreamCommonInterfaceEx> StreamOutRemoteSubmix::createNewStream(
        const std::vector<::aidl::android::media::audio::common::AudioDevice>& devices,
        StreamContext* context, const Metadata& metadata) {
    return std::unique_ptr<StreamCommonInterfaceEx>(
            new InnerStreamWrapper<StreamRemoteSubmix>(context, metadata, devices.front().address));
}

}  // namespace aidl::android::hardware::audio::core