xref: /aosp_15_r20/frameworks/av/media/libmediaplayerservice/nuplayer/NuPlayerRenderer.cpp (revision ec779b8e0859a360c3d303172224686826e6e0e1)
1 /*
2  * Copyright (C) 2010 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 //#define LOG_NDEBUG 0
18 #define ATRACE_TAG ATRACE_TAG_AUDIO
19 #define LOG_TAG "NuPlayerRenderer"
20 #include <utils/Log.h>
21 
22 #include "AWakeLock.h"
23 #include "NuPlayerRenderer.h"
24 #include <algorithm>
25 #include <cutils/properties.h>
26 #include <media/stagefright/foundation/ADebug.h>
27 #include <media/stagefright/foundation/AMessage.h>
28 #include <media/stagefright/foundation/AUtils.h>
29 #include <media/stagefright/MediaClock.h>
30 #include <media/stagefright/MediaCodecConstants.h>
31 #include <media/stagefright/MediaDefs.h>
32 #include <media/stagefright/MediaErrors.h>
33 #include <media/stagefright/MetaData.h>
34 #include <media/stagefright/Utils.h>
35 #include <media/stagefright/VideoFrameScheduler.h>
36 #include <media/MediaCodecBuffer.h>
37 #include <utils/SystemClock.h>
38 
39 #include <inttypes.h>
40 
41 #include <android-base/stringprintf.h>
42 using ::android::base::StringPrintf;
43 
44 namespace android {
45 
46 /*
47  * Example of common configuration settings in shell script form
48 
49    #Turn offload audio off (use PCM for Play Music) -- AudioPolicyManager
50    adb shell setprop audio.offload.disable 1
51 
52    #Allow offload audio with video (requires offloading to be enabled) -- AudioPolicyManager
53    adb shell setprop audio.offload.video 1
54 
55    #Use audio callbacks for PCM data
56    adb shell setprop media.stagefright.audio.cbk 1
57 
58    #Use deep buffer for PCM data with video (it is generally enabled for audio-only)
59    adb shell setprop media.stagefright.audio.deep 1
60 
61    #Set size of buffers for pcm audio sink in msec (example: 1000 msec)
62    adb shell setprop media.stagefright.audio.sink 1000
63 
64  * These configurations take effect for the next track played (not the current track).
65  */
66 
67 static inline bool getUseAudioCallbackSetting() {
68     return property_get_bool("media.stagefright.audio.cbk", false /* default_value */);
69 }
70 
71 static inline int32_t getAudioSinkPcmMsSetting() {
72     return property_get_int32(
73             "media.stagefright.audio.sink", 500 /* default_value */);
74 }
75 
76 // Maximum time in paused state when offloading audio decompression. When elapsed, the AudioSink
77 // is closed to allow the audio DSP to power down.
78 static const int64_t kOffloadPauseMaxUs = 10000000LL;
79 
80 // Additional delay after teardown before releasing the wake lock to allow time for the audio path
81 // to be completely released
82 static const int64_t kWakelockReleaseDelayUs = 2000000LL;
83 
84 // Maximum allowed delay from AudioSink, 1.5 seconds.
85 static const int64_t kMaxAllowedAudioSinkDelayUs = 1500000LL;
86 
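// Minimum interval between MediaClock anchor updates driven by audio (see onNewAudioMediaTime()).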
87 static const int64_t kMinimumAudioClockUpdatePeriodUs = 20 /* msec */ * 1000;
88 
89 // Default video frame display duration when only video exists.
90 // Used to set max media time in MediaClock.
91 static const int64_t kDefaultVideoFrameIntervalUs = 100000LL;
92 
93 // static
94 const NuPlayer::Renderer::PcmInfo NuPlayer::Renderer::AUDIO_PCMINFO_INITIALIZER = {
95         AUDIO_CHANNEL_NONE,
96         AUDIO_OUTPUT_FLAG_NONE,
97         AUDIO_FORMAT_INVALID,
98         0, // mNumChannels
99         0 // mSampleRate
100 };
101 
102 // static
103 const int64_t NuPlayer::Renderer::kMinPositionUpdateDelayUs = 100000ll;
104 
105 NuPlayer::Renderer::Renderer(
106         const sp<MediaPlayerBase::AudioSink> &sink,
107         const sp<MediaClock> &mediaClock,
108         const sp<AMessage> &notify,
109         uint32_t flags)
110     : mAudioSink(sink),
111       mUseVirtualAudioSink(false),
112       mNotify(notify),
113       mFlags(flags),
114       mNumFramesWritten(0),
115       mDrainAudioQueuePending(false),
116       mDrainVideoQueuePending(false),
117       mAudioQueueGeneration(0),
118       mVideoQueueGeneration(0),
119       mAudioDrainGeneration(0),
120       mVideoDrainGeneration(0),
121       mAudioEOSGeneration(0),
122       mMediaClock(mediaClock),
123       mPlaybackSettings(AUDIO_PLAYBACK_RATE_DEFAULT),
124       mAudioFirstAnchorTimeMediaUs(-1),
125       mAudioAnchorTimeMediaUs(-1),
126       mAnchorTimeMediaUs(-1),
127       mAnchorNumFramesWritten(-1),
128       mVideoLateByUs(0LL),
129       mNextVideoTimeMediaUs(-1),
130       mHasAudio(false),
131       mHasVideo(false),
132       mNotifyCompleteAudio(false),
133       mNotifyCompleteVideo(false),
134       mSyncQueues(false),
135       mPaused(false),
136       mPauseDrainAudioAllowedUs(0),
137       mVideoSampleReceived(false),
138       mVideoRenderingStarted(false),
139       mVideoRenderingStartGeneration(0),
140       mAudioRenderingStartGeneration(0),
141       mRenderingDataDelivered(false),
142       mNextAudioClockUpdateTimeUs(-1),
143       mLastAudioMediaTimeUs(-1),
144       mAudioOffloadPauseTimeoutGeneration(0),
145       mAudioTornDown(false),
146       mCurrentOffloadInfo(AUDIO_INFO_INITIALIZER),
147       mCurrentPcmInfo(AUDIO_PCMINFO_INITIALIZER),
148       mTotalBuffersQueued(0),
149       mLastAudioBufferDrained(0),
150       mUseAudioCallback(false),
151       mWakeLock(new AWakeLock()),
152       mNeedVideoClearAnchor(false) {
153     CHECK(mediaClock != NULL);
154     mPlaybackRate = mPlaybackSettings.mSpeed;
155     mMediaClock->setPlaybackRate(mPlaybackRate);
156     (void)mSyncFlag.test_and_set();
157 }
158 
159 NuPlayer::Renderer::~Renderer() {
160     if (offloadingAudio()) {
161         mAudioSink->stop();
162         mAudioSink->flush();
163         mAudioSink->close();
164     }
165 
166     // Try to avoid a race condition in case the callback is still active.
167     Mutex::Autolock autoLock(mLock);
168     if (mUseAudioCallback) {
169         flushQueue(&mAudioQueue);
170         flushQueue(&mVideoQueue);
171     }
172     mWakeLock.clear();
173     mVideoScheduler.clear();
174     mNotify.clear();
175     mAudioSink.clear();
176 }
177 
178 void NuPlayer::Renderer::queueBuffer(
179         bool audio,
180         const sp<MediaCodecBuffer> &buffer,
181         const sp<AMessage> &notifyConsumed) {
182     sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
183     msg->setInt32("queueGeneration", getQueueGeneration(audio));
184     msg->setInt32("audio", static_cast<int32_t>(audio));
185     msg->setObject("buffer", buffer);
186     msg->setMessage("notifyConsumed", notifyConsumed);
187     msg->post();
188 }
189 
190 void NuPlayer::Renderer::queueEOS(bool audio, status_t finalResult) {
191     CHECK_NE(finalResult, (status_t)OK);
192 
193     sp<AMessage> msg = new AMessage(kWhatQueueEOS, this);
194     msg->setInt32("queueGeneration", getQueueGeneration(audio));
195     msg->setInt32("audio", static_cast<int32_t>(audio));
196     msg->setInt32("finalResult", finalResult);
197     msg->post();
198 }
199 
200 status_t NuPlayer::Renderer::setPlaybackSettings(const AudioPlaybackRate &rate) {
201     sp<AMessage> msg = new AMessage(kWhatConfigPlayback, this);
202     writeToAMessage(msg, rate);
203     sp<AMessage> response;
204     status_t err = msg->postAndAwaitResponse(&response);
205     if (err == OK && response != NULL) {
206         CHECK(response->findInt32("err", &err));
207     }
208     return err;
209 }
210 
211 status_t NuPlayer::Renderer::onConfigPlayback(const AudioPlaybackRate &rate /* sanitized */) {
212     if (rate.mSpeed == 0.f) {
213         onPause();
214         // don't call audiosink's setPlaybackRate if pausing, as pitch does not
215     // have to correspond to any non-zero speed (e.g. the old speed). Keep
216         // settings nonetheless, using the old speed, in case audiosink changes.
217         AudioPlaybackRate newRate = rate;
218         newRate.mSpeed = mPlaybackSettings.mSpeed;
219         mPlaybackSettings = newRate;
220         return OK;
221     }
222 
223     if (mAudioSink != NULL && mAudioSink->ready()) {
224         status_t err = mAudioSink->setPlaybackRate(rate);
225         if (err != OK) {
226             return err;
227         }
228     }
229 
230     if (!mHasAudio && mHasVideo) {
231         mNeedVideoClearAnchor = true;
232     }
233     mPlaybackSettings = rate;
234     mPlaybackRate = rate.mSpeed;
235     mMediaClock->setPlaybackRate(mPlaybackRate);
236     return OK;
237 }
238 
239 status_t NuPlayer::Renderer::getPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
240     sp<AMessage> msg = new AMessage(kWhatGetPlaybackSettings, this);
241     sp<AMessage> response;
242     status_t err = msg->postAndAwaitResponse(&response);
243     if (err == OK && response != NULL) {
244         CHECK(response->findInt32("err", &err));
245         if (err == OK) {
246             readFromAMessage(response, rate);
247         }
248     }
249     return err;
250 }
251 
252 status_t NuPlayer::Renderer::onGetPlaybackSettings(AudioPlaybackRate *rate /* nonnull */) {
253     if (mAudioSink != NULL && mAudioSink->ready()) {
254         status_t err = mAudioSink->getPlaybackRate(rate);
255         if (err == OK) {
256             if (!isAudioPlaybackRateEqual(*rate, mPlaybackSettings)) {
257                 ALOGW("correcting mismatch in internal/external playback rate");
258             }
259             // get playback settings used by audiosink, as it may be
260             // slightly off due to audiosink not applying small changes.
261             mPlaybackSettings = *rate;
262             if (mPaused) {
263                 rate->mSpeed = 0.f;
264             }
265         }
266         return err;
267     }
268     *rate = mPlaybackSettings;
269     return OK;
270 }
271 
272 status_t NuPlayer::Renderer::setSyncSettings(const AVSyncSettings &sync, float videoFpsHint) {
273     sp<AMessage> msg = new AMessage(kWhatConfigSync, this);
274     writeToAMessage(msg, sync, videoFpsHint);
275     sp<AMessage> response;
276     status_t err = msg->postAndAwaitResponse(&response);
277     if (err == OK && response != NULL) {
278         CHECK(response->findInt32("err", &err));
279     }
280     return err;
281 }
282 
283 status_t NuPlayer::Renderer::onConfigSync(const AVSyncSettings &sync, float videoFpsHint __unused) {
284     if (sync.mSource != AVSYNC_SOURCE_DEFAULT) {
285         return BAD_VALUE;
286     }
287     // TODO: support sync sources
288     return INVALID_OPERATION;
289 }
290 
291 status_t NuPlayer::Renderer::getSyncSettings(AVSyncSettings *sync, float *videoFps) {
292     sp<AMessage> msg = new AMessage(kWhatGetSyncSettings, this);
293     sp<AMessage> response;
294     status_t err = msg->postAndAwaitResponse(&response);
295     if (err == OK && response != NULL) {
296         CHECK(response->findInt32("err", &err));
297         if (err == OK) {
298             readFromAMessage(response, sync, videoFps);
299         }
300     }
301     return err;
302 }
303 
304 status_t NuPlayer::Renderer::onGetSyncSettings(
305         AVSyncSettings *sync /* nonnull */, float *videoFps /* nonnull */) {
306     *sync = mSyncSettings;
307     *videoFps = -1.f;
308     return OK;
309 }
310 
311 void NuPlayer::Renderer::flush(bool audio, bool notifyComplete) {
312     {
313         Mutex::Autolock autoLock(mLock);
314         if (audio) {
315             mNotifyCompleteAudio |= notifyComplete;
316             clearAudioFirstAnchorTime_l();
317             ++mAudioQueueGeneration;
318             ++mAudioDrainGeneration;
319         } else {
320             mNotifyCompleteVideo |= notifyComplete;
321             ++mVideoQueueGeneration;
322             ++mVideoDrainGeneration;
323             mNextVideoTimeMediaUs = -1;
324         }
325 
326         mVideoLateByUs = 0;
327         mSyncQueues = false;
328     }
329 
330     // Wait until the current job in the message queue is done, to make sure
331     // buffer processing from the old generation is finished. After the current
332     // job is finished, access to buffers is protected by generation.
333     Mutex::Autolock syncLock(mSyncLock);
334     int64_t syncCount = mSyncCount;
335     mSyncFlag.clear();
336 
337     // Make sure message queue is not empty after mSyncFlag is cleared.
338     sp<AMessage> msg = new AMessage(kWhatFlush, this);
339     msg->setInt32("audio", static_cast<int32_t>(audio));
340     msg->post();
341 
342     int64_t uptimeMs = uptimeMillis();
343     while (mSyncCount == syncCount) {
344         (void)mSyncCondition.waitRelative(mSyncLock, ms2ns(1000));
345         if (uptimeMillis() - uptimeMs > 1000) {
346             ALOGW("flush(): no wake-up from sync point for 1s; stop waiting to "
347                   "prevent being stuck indefinitely.");
348             break;
349         }
350     }
351 }
352 
353 void NuPlayer::Renderer::signalTimeDiscontinuity() {
354 }
355 
356 void NuPlayer::Renderer::signalDisableOffloadAudio() {
357     (new AMessage(kWhatDisableOffloadAudio, this))->post();
358 }
359 
360 void NuPlayer::Renderer::signalEnableOffloadAudio() {
361     (new AMessage(kWhatEnableOffloadAudio, this))->post();
362 }
363 
364 void NuPlayer::Renderer::pause() {
365     (new AMessage(kWhatPause, this))->post();
366 }
367 
368 void NuPlayer::Renderer::resume() {
369     (new AMessage(kWhatResume, this))->post();
370 }
371 
372 void NuPlayer::Renderer::setVideoFrameRate(float fps) {
373     sp<AMessage> msg = new AMessage(kWhatSetVideoFrameRate, this);
374     msg->setFloat("frame-rate", fps);
375     msg->post();
376 }
377 
378 // Called on any thread without mLock acquired.
379 status_t NuPlayer::Renderer::getCurrentPosition(int64_t *mediaUs) {
380     status_t result = mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
381     if (result == OK) {
382         return result;
383     }
384 
385     // MediaClock has not started yet. Try to start it if possible.
386     {
387         Mutex::Autolock autoLock(mLock);
388         if (mAudioFirstAnchorTimeMediaUs == -1) {
389             return result;
390         }
391 
392         AudioTimestamp ts;
393         status_t res = mAudioSink->getTimestamp(ts);
394         if (res != OK) {
395             return result;
396         }
397 
398         // AudioSink has rendered some frames.
399         int64_t nowUs = ALooper::GetNowUs();
400         int64_t playedOutDurationUs = mAudioSink->getPlayedOutDurationUs(nowUs);
401         if (playedOutDurationUs == 0) {
402             *mediaUs = mAudioFirstAnchorTimeMediaUs;
403             return OK;
404         }
405         int64_t nowMediaUs = playedOutDurationUs + mAudioFirstAnchorTimeMediaUs;
406         mMediaClock->updateAnchor(nowMediaUs, nowUs, -1);
407     }
408 
409     return mMediaClock->getMediaTime(ALooper::GetNowUs(), mediaUs);
410 }
411 
412 void NuPlayer::Renderer::clearAudioFirstAnchorTime_l() {
413     mAudioFirstAnchorTimeMediaUs = -1;
414     mMediaClock->setStartingTimeMedia(-1);
415 }
416 
417 void NuPlayer::Renderer::setAudioFirstAnchorTimeIfNeeded_l(int64_t mediaUs) {
418     if (mAudioFirstAnchorTimeMediaUs == -1) {
419         mAudioFirstAnchorTimeMediaUs = mediaUs;
420         mMediaClock->setStartingTimeMedia(mediaUs);
421     }
422 }
423 
424 // Called on renderer looper.
425 void NuPlayer::Renderer::clearAnchorTime() {
426     mMediaClock->clearAnchor();
427     mAudioAnchorTimeMediaUs = -1;
428     mAnchorTimeMediaUs = -1;
429     mAnchorNumFramesWritten = -1;
430 }
431 
432 void NuPlayer::Renderer::setVideoLateByUs(int64_t lateUs) {
433     Mutex::Autolock autoLock(mLock);
434     mVideoLateByUs = lateUs;
435 }
436 
437 int64_t NuPlayer::Renderer::getVideoLateByUs() {
438     Mutex::Autolock autoLock(mLock);
439     return mVideoLateByUs;
440 }
441 
442 status_t NuPlayer::Renderer::openAudioSink(
443         const sp<AMessage> &format,
444         bool offloadOnly,
445         bool hasVideo,
446         uint32_t flags,
447         bool *isOffloaded,
448         bool isStreaming) {
449     sp<AMessage> msg = new AMessage(kWhatOpenAudioSink, this);
450     msg->setMessage("format", format);
451     msg->setInt32("offload-only", offloadOnly);
452     msg->setInt32("has-video", hasVideo);
453     msg->setInt32("flags", flags);
454     msg->setInt32("isStreaming", isStreaming);
455 
456     sp<AMessage> response;
457     status_t postStatus = msg->postAndAwaitResponse(&response);
458 
459     int32_t err;
460     if (postStatus != OK || response.get() == nullptr || !response->findInt32("err", &err)) {
461         err = INVALID_OPERATION;
462     } else if (err == OK && isOffloaded != NULL) {
463         int32_t offload;
464         CHECK(response->findInt32("offload", &offload));
465         *isOffloaded = (offload != 0);
466     }
467     return err;
468 }
469 
470 void NuPlayer::Renderer::closeAudioSink() {
471     sp<AMessage> msg = new AMessage(kWhatCloseAudioSink, this);
472 
473     sp<AMessage> response;
474     msg->postAndAwaitResponse(&response);
475 }
476 
477 void NuPlayer::Renderer::dump(AString& logString) {
478     Mutex::Autolock autoLock(mLock);
479     logString.append("paused(");
480     logString.append(mPaused);
481     logString.append("), offloading(");
482     logString.append(offloadingAudio());
483     logString.append("), wakelock(acquired=");
484     mWakelockAcquireEvent.dump(logString);
485     logString.append(", timeout=");
486     mWakelockTimeoutEvent.dump(logString);
487     logString.append(", release=");
488     mWakelockReleaseEvent.dump(logString);
489     logString.append(", cancel=");
490     mWakelockCancelEvent.dump(logString);
491     logString.append(")");
492 }
493 
494 void NuPlayer::Renderer::changeAudioFormat(
495         const sp<AMessage> &format,
496         bool offloadOnly,
497         bool hasVideo,
498         uint32_t flags,
499         bool isStreaming,
500         const sp<AMessage> &notify) {
501     sp<AMessage> meta = new AMessage;
502     meta->setMessage("format", format);
503     meta->setInt32("offload-only", offloadOnly);
504     meta->setInt32("has-video", hasVideo);
505     meta->setInt32("flags", flags);
506     meta->setInt32("isStreaming", isStreaming);
507 
508     sp<AMessage> msg = new AMessage(kWhatChangeAudioFormat, this);
509     msg->setInt32("queueGeneration", getQueueGeneration(true /* audio */));
510     msg->setMessage("notify", notify);
511     msg->setMessage("meta", meta);
512     msg->post();
513 }
514 
515 void NuPlayer::Renderer::onMessageReceived(const sp<AMessage> &msg) {
516     switch (msg->what()) {
517         case kWhatOpenAudioSink:
518         {
519             sp<AMessage> format;
520             CHECK(msg->findMessage("format", &format));
521 
522             int32_t offloadOnly;
523             CHECK(msg->findInt32("offload-only", &offloadOnly));
524 
525             int32_t hasVideo;
526             CHECK(msg->findInt32("has-video", &hasVideo));
527 
528             uint32_t flags;
529             CHECK(msg->findInt32("flags", (int32_t *)&flags));
530 
531             uint32_t isStreaming;
532             CHECK(msg->findInt32("isStreaming", (int32_t *)&isStreaming));
533 
534             status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
535 
536             sp<AMessage> response = new AMessage;
537             response->setInt32("err", err);
538             response->setInt32("offload", offloadingAudio());
539 
540             sp<AReplyToken> replyID;
541             CHECK(msg->senderAwaitsResponse(&replyID));
542             response->postReply(replyID);
543 
544             break;
545         }
546 
547         case kWhatCloseAudioSink:
548         {
549             sp<AReplyToken> replyID;
550             CHECK(msg->senderAwaitsResponse(&replyID));
551 
552             onCloseAudioSink();
553 
554             sp<AMessage> response = new AMessage;
555             response->postReply(replyID);
556             break;
557         }
558 
559         case kWhatStopAudioSink:
560         {
561             mAudioSink->stop();
562             break;
563         }
564 
565         case kWhatChangeAudioFormat:
566         {
567             int32_t queueGeneration;
568             CHECK(msg->findInt32("queueGeneration", &queueGeneration));
569 
570             sp<AMessage> notify;
571             CHECK(msg->findMessage("notify", &notify));
572 
573             if (offloadingAudio()) {
574                 ALOGW("changeAudioFormat should NOT be called in offload mode");
575                 notify->setInt32("err", INVALID_OPERATION);
576                 notify->post();
577                 break;
578             }
579 
580             sp<AMessage> meta;
581             CHECK(msg->findMessage("meta", &meta));
582 
583             if (queueGeneration != getQueueGeneration(true /* audio */)
584                     || mAudioQueue.empty()) {
585                 onChangeAudioFormat(meta, notify);
586                 break;
587             }
588 
589             QueueEntry entry;
590             entry.mNotifyConsumed = notify;
591             entry.mMeta = meta;
592 
593             Mutex::Autolock autoLock(mLock);
594             mAudioQueue.push_back(entry);
595             postDrainAudioQueue_l();
596 
597             break;
598         }
599 
600         case kWhatDrainAudioQueue:
601         {
602             mDrainAudioQueuePending = false;
603 
604             int32_t generation;
605             CHECK(msg->findInt32("drainGeneration", &generation));
606             if (generation != getDrainGeneration(true /* audio */)) {
607                 break;
608             }
609 
610             if (onDrainAudioQueue()) {
611                 uint32_t numFramesPlayed;
612                 CHECK_EQ(mAudioSink->getPosition(&numFramesPlayed),
613                          (status_t)OK);
614 
615                 // Handle AudioTrack race when start is immediately called after flush.
616                 uint32_t numFramesPendingPlayout =
617                     (mNumFramesWritten > numFramesPlayed ?
618                         mNumFramesWritten - numFramesPlayed : 0);
619 
620                 // This is how long the audio sink will have data to
621                 // play back.
622                 int64_t delayUs =
623                     mAudioSink->msecsPerFrame()
624                         * numFramesPendingPlayout * 1000LL;
625                 if (mPlaybackRate > 1.0f) {
626                     delayUs /= mPlaybackRate;
627                 }
628 
629                 // Let's give it more data after about half that time
630                 // has elapsed.
631                 delayUs /= 2;
632                 // check the buffer size to estimate maximum delay permitted.
633                 const int64_t maxDrainDelayUs = std::max(
634                         mAudioSink->getBufferDurationInUs(), (int64_t)500000 /* half second */);
635                 ALOGD_IF(delayUs > maxDrainDelayUs, "postDrainAudioQueue long delay: %lld > %lld",
636                         (long long)delayUs, (long long)maxDrainDelayUs);
637                 Mutex::Autolock autoLock(mLock);
638                 postDrainAudioQueue_l(delayUs);
639             }
640             break;
641         }
642 
643         case kWhatDrainVideoQueue:
644         {
645             int32_t generation;
646             CHECK(msg->findInt32("drainGeneration", &generation));
647             if (generation != getDrainGeneration(false /* audio */)) {
648                 break;
649             }
650 
651             mDrainVideoQueuePending = false;
652 
653             onDrainVideoQueue();
654 
655             postDrainVideoQueue();
656             break;
657         }
658 
659         case kWhatPostDrainVideoQueue:
660         {
661             int32_t generation;
662             CHECK(msg->findInt32("drainGeneration", &generation));
663             if (generation != getDrainGeneration(false /* audio */)) {
664                 break;
665             }
666 
667             mDrainVideoQueuePending = false;
668             postDrainVideoQueue();
669             break;
670         }
671 
672         case kWhatQueueBuffer:
673         {
674             onQueueBuffer(msg);
675             break;
676         }
677 
678         case kWhatQueueEOS:
679         {
680             onQueueEOS(msg);
681             break;
682         }
683 
684         case kWhatEOS:
685         {
686             int32_t generation;
687             CHECK(msg->findInt32("audioEOSGeneration", &generation));
688             if (generation != mAudioEOSGeneration) {
689                 break;
690             }
691             status_t finalResult;
692             CHECK(msg->findInt32("finalResult", &finalResult));
693             notifyEOS(true /* audio */, finalResult);
694             break;
695         }
696 
697         case kWhatConfigPlayback:
698         {
699             sp<AReplyToken> replyID;
700             CHECK(msg->senderAwaitsResponse(&replyID));
701             AudioPlaybackRate rate;
702             readFromAMessage(msg, &rate);
703             status_t err = onConfigPlayback(rate);
704             sp<AMessage> response = new AMessage;
705             response->setInt32("err", err);
706             response->postReply(replyID);
707             break;
708         }
709 
710         case kWhatGetPlaybackSettings:
711         {
712             sp<AReplyToken> replyID;
713             CHECK(msg->senderAwaitsResponse(&replyID));
714             AudioPlaybackRate rate = AUDIO_PLAYBACK_RATE_DEFAULT;
715             status_t err = onGetPlaybackSettings(&rate);
716             sp<AMessage> response = new AMessage;
717             if (err == OK) {
718                 writeToAMessage(response, rate);
719             }
720             response->setInt32("err", err);
721             response->postReply(replyID);
722             break;
723         }
724 
725         case kWhatConfigSync:
726         {
727             sp<AReplyToken> replyID;
728             CHECK(msg->senderAwaitsResponse(&replyID));
729             AVSyncSettings sync;
730             float videoFpsHint;
731             readFromAMessage(msg, &sync, &videoFpsHint);
732             status_t err = onConfigSync(sync, videoFpsHint);
733             sp<AMessage> response = new AMessage;
734             response->setInt32("err", err);
735             response->postReply(replyID);
736             break;
737         }
738 
739         case kWhatGetSyncSettings:
740         {
741             sp<AReplyToken> replyID;
742             CHECK(msg->senderAwaitsResponse(&replyID));
743 
744             ALOGV("kWhatGetSyncSettings");
745             AVSyncSettings sync;
746             float videoFps = -1.f;
747             status_t err = onGetSyncSettings(&sync, &videoFps);
748             sp<AMessage> response = new AMessage;
749             if (err == OK) {
750                 writeToAMessage(response, sync, videoFps);
751             }
752             response->setInt32("err", err);
753             response->postReply(replyID);
754             break;
755         }
756 
757         case kWhatFlush:
758         {
759             onFlush(msg);
760             break;
761         }
762 
763         case kWhatDisableOffloadAudio:
764         {
765             onDisableOffloadAudio();
766             break;
767         }
768 
769         case kWhatEnableOffloadAudio:
770         {
771             onEnableOffloadAudio();
772             break;
773         }
774 
775         case kWhatPause:
776         {
777             onPause();
778             break;
779         }
780 
781         case kWhatResume:
782         {
783             onResume();
784             break;
785         }
786 
787         case kWhatSetVideoFrameRate:
788         {
789             float fps;
790             CHECK(msg->findFloat("frame-rate", &fps));
791             onSetVideoFrameRate(fps);
792             break;
793         }
794 
795         case kWhatAudioTearDown:
796         {
797             int32_t reason;
798             CHECK(msg->findInt32("reason", &reason));
799 
800             onAudioTearDown((AudioTearDownReason)reason);
801             break;
802         }
803 
804         case kWhatAudioOffloadPauseTimeout:
805         {
806             int32_t generation;
807             CHECK(msg->findInt32("drainGeneration", &generation));
808             mWakelockTimeoutEvent.updateValues(
809                     uptimeMillis(),
810                     generation,
811                     mAudioOffloadPauseTimeoutGeneration);
812             if (generation != mAudioOffloadPauseTimeoutGeneration) {
813                 break;
814             }
815             ALOGV("Audio Offload tear down due to pause timeout.");
816             onAudioTearDown(kDueToTimeout);
817             sp<AMessage> newMsg = new AMessage(kWhatReleaseWakeLock, this);
818             newMsg->setInt32("drainGeneration", generation);
819             newMsg->post(kWakelockReleaseDelayUs);
820             break;
821         }
822 
823         case kWhatReleaseWakeLock:
824         {
825             int32_t generation;
826             CHECK(msg->findInt32("drainGeneration", &generation));
827             mWakelockReleaseEvent.updateValues(
828                 uptimeMillis(),
829                 generation,
830                 mAudioOffloadPauseTimeoutGeneration);
831             if (generation != mAudioOffloadPauseTimeoutGeneration) {
832                 break;
833             }
834             ALOGV("releasing audio offload pause wakelock.");
835             mWakeLock->release();
836             break;
837         }
838 
839         default:
840             TRESPASS();
841             break;
842     }
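    // Sync point for flush(): flush() clears mSyncFlag and waits on mSyncCondition;
    // the first message handled after that bumps mSyncCount and wakes it up.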
843     if (!mSyncFlag.test_and_set()) {
844         Mutex::Autolock syncLock(mSyncLock);
845         ++mSyncCount;
846         mSyncCondition.broadcast();
847     }
848 }
849 
850 void NuPlayer::Renderer::postDrainAudioQueue_l(int64_t delayUs) {
851     if (mDrainAudioQueuePending || mSyncQueues || mUseAudioCallback) {
852         return;
853     }
854 
855     if (mAudioQueue.empty()) {
856         return;
857     }
858 
859     // FIXME: if paused, wait until AudioTrack stop() is complete before delivering data.
860     if (mPaused) {
861         const int64_t diffUs = mPauseDrainAudioAllowedUs - ALooper::GetNowUs();
862         if (diffUs > delayUs) {
863             delayUs = diffUs;
864         }
865     }
866 
867     mDrainAudioQueuePending = true;
868     sp<AMessage> msg = new AMessage(kWhatDrainAudioQueue, this);
869     msg->setInt32("drainGeneration", mAudioDrainGeneration);
870     msg->post(delayUs);
871 }
872 
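// Record the current drain generations; notifyIfMediaRenderingStarted_l() posts
// kWhatMediaRenderingStart once both audio and video have delivered data within these generations.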
873 void NuPlayer::Renderer::prepareForMediaRenderingStart_l() {
874     mAudioRenderingStartGeneration = mAudioDrainGeneration;
875     mVideoRenderingStartGeneration = mVideoDrainGeneration;
876     mRenderingDataDelivered = false;
877 }
878 
879 void NuPlayer::Renderer::notifyIfMediaRenderingStarted_l() {
880     if (mVideoRenderingStartGeneration == mVideoDrainGeneration &&
881         mAudioRenderingStartGeneration == mAudioDrainGeneration) {
882         mRenderingDataDelivered = true;
883         if (mPaused) {
884             return;
885         }
886         mVideoRenderingStartGeneration = -1;
887         mAudioRenderingStartGeneration = -1;
888 
889         sp<AMessage> notify = mNotify->dup();
890         notify->setInt32("what", kWhatMediaRenderingStart);
891         notify->post();
892     }
893 }
894 
895 // static
896 size_t NuPlayer::Renderer::AudioSinkCallback(
897         const sp<MediaPlayerBase::AudioSink>& /* audioSink */,
898         void *buffer,
899         size_t size,
900         const wp<RefBase>& cookie,
901         MediaPlayerBase::AudioSink::cb_event_t event) {
902     if (cookie == nullptr) return 0;
903     const auto ref = cookie.promote();
904     if (!ref) return 0;
905     const auto me = static_cast<NuPlayer::Renderer*>(ref.get()); // we already hold a sp.
906 
907     switch (event) {
908         case MediaPlayerBase::AudioSink::CB_EVENT_FILL_BUFFER:
909         {
910             return me->fillAudioBuffer(buffer, size);
911             break;
912         }
913 
914         case MediaPlayerBase::AudioSink::CB_EVENT_STREAM_END:
915         {
916             ALOGV("AudioSink::CB_EVENT_STREAM_END");
917             me->notifyEOSCallback();
918             break;
919         }
920 
921         case MediaPlayerBase::AudioSink::CB_EVENT_TEAR_DOWN:
922         {
923             ALOGV("AudioSink::CB_EVENT_TEAR_DOWN");
924             me->notifyAudioTearDown(kDueToError);
925             break;
926         }
927     }
928 
929     return 0;
930 }
931 
932 void NuPlayer::Renderer::notifyEOSCallback() {
933     Mutex::Autolock autoLock(mLock);
934 
935     if (!mUseAudioCallback) {
936         return;
937     }
938 
939     notifyEOS_l(true /* audio */, ERROR_END_OF_STREAM);
940 }
941 
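// Audio callback path (mUseAudioCallback): copies queued audio into the sink-provided buffer,
// anchors the MediaClock to the played-out duration, and posts EOS handling when the queue ends.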
942 size_t NuPlayer::Renderer::fillAudioBuffer(void *buffer, size_t size) {
943     Mutex::Autolock autoLock(mLock);
944 
945     if (!mUseAudioCallback) {
946         return 0;
947     }
948 
949     bool hasEOS = false;
950 
951     size_t sizeCopied = 0;
952     bool firstEntry = true;
953     QueueEntry *entry;  // will be valid after while loop if hasEOS is set.
954     while (sizeCopied < size && !mAudioQueue.empty()) {
955         entry = &*mAudioQueue.begin();
956 
957         if (entry->mBuffer == NULL) { // EOS
958             hasEOS = true;
959             mAudioQueue.erase(mAudioQueue.begin());
960             break;
961         }
962 
963         if (firstEntry && entry->mOffset == 0) {
964             firstEntry = false;
965             int64_t mediaTimeUs;
966             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
967             if (mediaTimeUs < 0) {
968                 ALOGD("fillAudioBuffer: reset negative media time %.2f secs to zero",
969                        mediaTimeUs / 1E6);
970                 mediaTimeUs = 0;
971             }
972             ALOGV("fillAudioBuffer: rendering audio at media time %.2f secs", mediaTimeUs / 1E6);
973             setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
974         }
975 
976         size_t copy = entry->mBuffer->size() - entry->mOffset;
977         size_t sizeRemaining = size - sizeCopied;
978         if (copy > sizeRemaining) {
979             copy = sizeRemaining;
980         }
981 
982         memcpy((char *)buffer + sizeCopied,
983                entry->mBuffer->data() + entry->mOffset,
984                copy);
985 
986         entry->mOffset += copy;
987         if (entry->mOffset == entry->mBuffer->size()) {
988             entry->mNotifyConsumed->post();
989             mAudioQueue.erase(mAudioQueue.begin());
990             entry = NULL;
991         }
992         sizeCopied += copy;
993 
994         notifyIfMediaRenderingStarted_l();
995     }
996 
997     if (mAudioFirstAnchorTimeMediaUs >= 0) {
998         int64_t nowUs = ALooper::GetNowUs();
999         int64_t nowMediaUs =
1000             mAudioFirstAnchorTimeMediaUs + mAudioSink->getPlayedOutDurationUs(nowUs);
1001         // we don't know how much data we are queueing for offloaded tracks.
1002         mMediaClock->updateAnchor(nowMediaUs, nowUs, INT64_MAX);
1003     }
1004 
1005     // for non-offloaded audio, we need to compute the frames written because
1006     // there is no EVENT_STREAM_END notification. The frames written gives
1007     // an estimate of the pending played-out duration.
1008     if (!offloadingAudio()) {
1009         mNumFramesWritten += sizeCopied / mAudioSink->frameSize();
1010     }
1011 
1012     if (hasEOS) {
1013         (new AMessage(kWhatStopAudioSink, this))->post();
1014         // As there is currently no EVENT_STREAM_END callback notification for
1015         // non-offloaded audio tracks, we need to post the EOS ourselves.
1016         if (!offloadingAudio()) {
1017             int64_t postEOSDelayUs = 0;
1018             if (mAudioSink->needsTrailingPadding()) {
1019                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
1020             }
1021             ALOGV("fillAudioBuffer: notifyEOS_l "
1022                     "mNumFramesWritten:%u  finalResult:%d  postEOSDelay:%lld",
1023                     mNumFramesWritten, entry->mFinalResult, (long long)postEOSDelayUs);
1024             notifyEOS_l(true /* audio */, entry->mFinalResult, postEOSDelayUs);
1025         }
1026     }
1027     return sizeCopied;
1028 }
1029 
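// Find the last EOS (or pending format-change) entry in the audio queue, reply to every
// entry queued before it, and drop those samples so the decoder is not left waiting.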
1030 void NuPlayer::Renderer::drainAudioQueueUntilLastEOS() {
1031     List<QueueEntry>::iterator it = mAudioQueue.begin(), itEOS = it;
1032     bool foundEOS = false;
1033     while (it != mAudioQueue.end()) {
1034         int32_t eos;
1035         QueueEntry *entry = &*it++;
1036         if ((entry->mBuffer == nullptr && entry->mNotifyConsumed == nullptr)
1037                 || (entry->mNotifyConsumed->findInt32("eos", &eos) && eos != 0)) {
1038             itEOS = it;
1039             foundEOS = true;
1040         }
1041     }
1042 
1043     if (foundEOS) {
1044         // post all replies before EOS and drop the samples
1045         for (it = mAudioQueue.begin(); it != itEOS; it++) {
1046             if (it->mBuffer == nullptr) {
1047                 if (it->mNotifyConsumed == nullptr) {
1048                     // delay doesn't matter as we don't even have an AudioTrack
1049                     notifyEOS(true /* audio */, it->mFinalResult);
1050                 } else {
1051                     // TAG for re-opening audio sink.
1052                     onChangeAudioFormat(it->mMeta, it->mNotifyConsumed);
1053                 }
1054             } else {
1055                 it->mNotifyConsumed->post();
1056             }
1057         }
1058         mAudioQueue.erase(mAudioQueue.begin(), itEOS);
1059     }
1060 }
1061 
1062 bool NuPlayer::Renderer::onDrainAudioQueue() {
1063     // do not drain audio during teardown as queued buffers may be invalid.
1064     if (mAudioTornDown) {
1065         return false;
1066     }
1067     // TODO: This call to getPosition checks if AudioTrack has been created
1068     // in AudioSink before draining audio. If AudioTrack doesn't exist, then
1069     // CHECKs on getPosition will fail.
1070     // We still need to figure out why AudioTrack is not created when
1071     // this function is called. One possible reason could be leftover
1072     // audio. Another place to check is whether the decoder
1073     // has received INFO_FORMAT_CHANGED as the first buffer since
1074     // AudioSink is opened there, and possible interactions with flush
1075     // immediately after start. Investigate error message
1076     // "vorbis_dsp_synthesis returned -135", along with RTSP.
1077     uint32_t numFramesPlayed;
1078     if (mAudioSink->getPosition(&numFramesPlayed) != OK) {
1079         // When getPosition fails, renderer will not reschedule the draining
1080         // unless new samples are queued.
1081         // If we have pending EOS (or "eos" marker for discontinuities), we need
1082         // to post these now as NuPlayerDecoder might be waiting for it.
1083         drainAudioQueueUntilLastEOS();
1084 
1085         ALOGW("onDrainAudioQueue(): audio sink is not ready");
1086         return false;
1087     }
1088 
1089 #if 0
1090     ssize_t numFramesAvailableToWrite =
1091         mAudioSink->frameCount() - (mNumFramesWritten - numFramesPlayed);
1092 
1093     if (numFramesAvailableToWrite == mAudioSink->frameCount()) {
1094         ALOGI("audio sink underrun");
1095     } else {
1096         ALOGV("audio queue has %d frames left to play",
1097              mAudioSink->frameCount() - numFramesAvailableToWrite);
1098     }
1099 #endif
1100 
1101     uint32_t prevFramesWritten = mNumFramesWritten;
1102     while (!mAudioQueue.empty()) {
1103         QueueEntry *entry = &*mAudioQueue.begin();
1104 
1105         if (entry->mBuffer == NULL) {
1106             if (entry->mNotifyConsumed != nullptr) {
1107                 // TAG for re-open audio sink.
1108                 onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1109                 mAudioQueue.erase(mAudioQueue.begin());
1110                 continue;
1111             }
1112 
1113             // EOS
1114             if (mPaused) {
1115                 // Do not notify EOS when paused.
1116             // This is needed to avoid switching to the next clip while paused.
1117                 ALOGV("onDrainAudioQueue(): Do not notify EOS when paused");
1118                 return false;
1119             }
1120 
1121             int64_t postEOSDelayUs = 0;
1122             if (mAudioSink->needsTrailingPadding()) {
1123                 postEOSDelayUs = getPendingAudioPlayoutDurationUs(ALooper::GetNowUs());
1124             }
1125             notifyEOS(true /* audio */, entry->mFinalResult, postEOSDelayUs);
1126             mLastAudioMediaTimeUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1127 
1128             mAudioQueue.erase(mAudioQueue.begin());
1129             entry = NULL;
1130             if (mAudioSink->needsTrailingPadding()) {
1131                 // If we're not in gapless playback (i.e. through setNextPlayer), we
1132                 // need to stop the track here, because that will play out the last
1133                 // little bit at the end of the file. Otherwise short files won't play.
1134                 mAudioSink->stop();
1135                 mNumFramesWritten = 0;
1136             }
1137             return false;
1138         }
1139 
1140         mLastAudioBufferDrained = entry->mBufferOrdinal;
1141 
1142         // ignore 0-sized buffer which could be EOS marker with no data
1143         if (entry->mOffset == 0 && entry->mBuffer->size() > 0) {
1144             int64_t mediaTimeUs;
1145             CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1146             ALOGV("onDrainAudioQueue: rendering audio at media time %.2f secs",
1147                     mediaTimeUs / 1E6);
1148             onNewAudioMediaTime(mediaTimeUs);
1149         }
1150 
1151         size_t copy = entry->mBuffer->size() - entry->mOffset;
1152 
1153         ssize_t written = mAudioSink->write(entry->mBuffer->data() + entry->mOffset,
1154                                             copy, false /* blocking */);
1155         if (written < 0) {
1156             // An error in AudioSink write. Perhaps the AudioSink was not properly opened.
1157             if (written == WOULD_BLOCK) {
1158                 ALOGV("AudioSink write would block when writing %zu bytes", copy);
1159             } else {
1160                 ALOGE("AudioSink write error(%zd) when writing %zu bytes", written, copy);
1161                 // This can only happen when AudioSink was opened with doNotReconnect flag set to
1162                 // true, in which case the NuPlayer will handle the reconnect.
1163                 notifyAudioTearDown(kDueToError);
1164             }
1165             break;
1166         }
1167 
1168         entry->mOffset += written;
1169         size_t remainder = entry->mBuffer->size() - entry->mOffset;
1170         if ((ssize_t)remainder < mAudioSink->frameSize()) {
1171             if (remainder > 0) {
1172                 ALOGW("Corrupted audio buffer has fractional frames, discarding %zu bytes.",
1173                         remainder);
1174                 entry->mOffset += remainder;
1175                 copy -= remainder;
1176             }
1177 
1178             entry->mNotifyConsumed->post();
1179             mAudioQueue.erase(mAudioQueue.begin());
1180 
1181             entry = NULL;
1182         }
1183 
1184         size_t copiedFrames = written / mAudioSink->frameSize();
1185         mNumFramesWritten += copiedFrames;
1186 
1187         {
1188             Mutex::Autolock autoLock(mLock);
1189             int64_t maxTimeMedia;
1190             maxTimeMedia =
1191                 mAnchorTimeMediaUs +
1192                         (int64_t)(max((long long)mNumFramesWritten - mAnchorNumFramesWritten, 0LL)
1193                                 * 1000LL * mAudioSink->msecsPerFrame());
1194             mMediaClock->updateMaxTimeMedia(maxTimeMedia);
1195 
1196             notifyIfMediaRenderingStarted_l();
1197         }
1198 
1199         if (written != (ssize_t)copy) {
1200             // A short count was received from AudioSink::write()
1201             //
1202             // AudioSink write is called in non-blocking mode.
1203             // It may return with a short count when:
1204             //
1205             // 1) Size to be copied is not a multiple of the frame size. Fractional frames are
1206             //    discarded.
1207             // 2) The data to be copied exceeds the available buffer in AudioSink.
1208             // 3) An error occurs and data has been partially copied to the buffer in AudioSink.
1209             // 4) AudioSink is an AudioCache for data retrieval, and the AudioCache is exceeded.
1210 
1211             // (Case 1)
1212             // Must be a multiple of the frame size.  If it is not a multiple of a frame size, it
1213             // needs to fail, as we should not carry over fractional frames between calls.
1214             CHECK_EQ(copy % mAudioSink->frameSize(), 0u);
1215 
1216             // (Case 2, 3, 4)
1217             // Return early to the caller.
1218             // Beware of calling immediately again as this may busy-loop if you are not careful.
1219             ALOGV("AudioSink write short frame count %zd < %zu", written, copy);
1220             break;
1221         }
1222     }
1223 
1224     // calculate whether we need to reschedule another write.
1225     bool reschedule = !mAudioQueue.empty()
1226             && (!mPaused
1227                 || prevFramesWritten != mNumFramesWritten); // permit pause to fill buffers
1228     //ALOGD("reschedule:%d  empty:%d  mPaused:%d  prevFramesWritten:%u  mNumFramesWritten:%u",
1229     //        reschedule, mAudioQueue.empty(), mPaused, prevFramesWritten, mNumFramesWritten);
1230     return reschedule;
1231 }
1232 
1233 int64_t NuPlayer::Renderer::getDurationUsIfPlayedAtSampleRate(uint32_t numFrames) {
1234     int32_t sampleRate = offloadingAudio() ?
1235             mCurrentOffloadInfo.sample_rate : mCurrentPcmInfo.mSampleRate;
1236     if (sampleRate == 0) {
1237         ALOGE("sampleRate is 0 in %s mode", offloadingAudio() ? "offload" : "non-offload");
1238         return 0;
1239     }
1240 
1241     return (int64_t)(numFrames * 1000000LL / sampleRate);
1242 }
1243 
1244 // Calculate duration of pending samples if played at normal rate (i.e., 1.0).
1245 int64_t NuPlayer::Renderer::getPendingAudioPlayoutDurationUs(int64_t nowUs) {
1246     int64_t writtenAudioDurationUs = getDurationUsIfPlayedAtSampleRate(mNumFramesWritten);
1247     if (mUseVirtualAudioSink) {
1248         int64_t nowUs = ALooper::GetNowUs();
1249         int64_t mediaUs;
1250         if (mMediaClock->getMediaTime(nowUs, &mediaUs) != OK) {
1251             return 0LL;
1252         } else {
1253             return writtenAudioDurationUs - (mediaUs - mAudioFirstAnchorTimeMediaUs);
1254         }
1255     }
1256 
1257     const int64_t audioSinkPlayedUs = mAudioSink->getPlayedOutDurationUs(nowUs);
1258     int64_t pendingUs = writtenAudioDurationUs - audioSinkPlayedUs;
1259     if (pendingUs < 0) {
1260         // This shouldn't happen unless the timestamp is stale.
1261         ALOGW("%s: pendingUs %lld < 0, clamping to zero, potential resume after pause "
1262                 "writtenAudioDurationUs: %lld, audioSinkPlayedUs: %lld",
1263                 __func__, (long long)pendingUs,
1264                 (long long)writtenAudioDurationUs, (long long)audioSinkPlayedUs);
1265         pendingUs = 0;
1266     }
1267     return pendingUs;
1268 }
1269 
1270 int64_t NuPlayer::Renderer::getRealTimeUs(int64_t mediaTimeUs, int64_t nowUs) {
1271     int64_t realUs;
1272     if (mMediaClock->getRealTimeFor(mediaTimeUs, &realUs) != OK) {
1273         // If we failed to get the current position, e.g. because the audio clock
1274         // is not ready, just play out the video immediately without delay.
1275         return nowUs;
1276     }
1277     return realUs;
1278 }
1279 
1280 void NuPlayer::Renderer::onNewAudioMediaTime(int64_t mediaTimeUs) {
1281     Mutex::Autolock autoLock(mLock);
1282     // TRICKY: vorbis decoder generates multiple frames with the same
1283     // timestamp, so only update on the first frame with a given timestamp
1284     if (mediaTimeUs == mAudioAnchorTimeMediaUs) {
1285         return;
1286     }
1287     setAudioFirstAnchorTimeIfNeeded_l(mediaTimeUs);
1288 
1289     // mNextAudioClockUpdateTimeUs is -1 if we're waiting for audio sink to start
1290     if (mNextAudioClockUpdateTimeUs == -1) {
1291         AudioTimestamp ts;
1292         if (mAudioSink->getTimestamp(ts) == OK && ts.mPosition > 0) {
1293             mNextAudioClockUpdateTimeUs = 0; // start our clock updates
1294         }
1295     }
1296     int64_t nowUs = ALooper::GetNowUs();
1297     if (mNextAudioClockUpdateTimeUs >= 0) {
1298         if (nowUs >= mNextAudioClockUpdateTimeUs) {
1299             int64_t nowMediaUs = mediaTimeUs - getPendingAudioPlayoutDurationUs(nowUs);
1300             mMediaClock->updateAnchor(nowMediaUs, nowUs, mediaTimeUs);
1301             mUseVirtualAudioSink = false;
1302             mNextAudioClockUpdateTimeUs = nowUs + kMinimumAudioClockUpdatePeriodUs;
1303         }
1304     } else {
1305         int64_t unused;
1306         if ((mMediaClock->getMediaTime(nowUs, &unused) != OK)
1307                 && (getDurationUsIfPlayedAtSampleRate(mNumFramesWritten)
1308                         > kMaxAllowedAudioSinkDelayUs)) {
1309             // Enough data has been sent to AudioSink, but AudioSink has not rendered
1310             // any data yet. Something is wrong with AudioSink, e.g., the device is not
1311             // connected to audio out.
1312             // Switch to system clock. This essentially creates a virtual AudioSink with
1313             // initial latency of getDurationUsIfPlayedAtSampleRate(mNumFramesWritten).
1314             // This virtual AudioSink renders audio data starting from the very first sample
1315             // and it's paced by system clock.
1316             ALOGW("AudioSink stuck. ARE YOU CONNECTED TO AUDIO OUT? Switching to system clock.");
1317             mMediaClock->updateAnchor(mAudioFirstAnchorTimeMediaUs, nowUs, mediaTimeUs);
1318             mUseVirtualAudioSink = true;
1319         }
1320     }
1321     mAnchorNumFramesWritten = mNumFramesWritten;
1322     mAudioAnchorTimeMediaUs = mediaTimeUs;
1323     mAnchorTimeMediaUs = mediaTimeUs;
1324 }
1325 
1326 // Called without mLock acquired.
1327 void NuPlayer::Renderer::postDrainVideoQueue() {
1328     if (mDrainVideoQueuePending
1329             || getSyncQueues()
1330             || (mPaused && mVideoSampleReceived)) {
1331         return;
1332     }
1333 
1334     if (mVideoQueue.empty()) {
1335         return;
1336     }
1337 
1338     QueueEntry &entry = *mVideoQueue.begin();
1339 
1340     sp<AMessage> msg = new AMessage(kWhatDrainVideoQueue, this);
1341     msg->setInt32("drainGeneration", getDrainGeneration(false /* audio */));
1342 
1343     if (entry.mBuffer == NULL) {
1344         // EOS doesn't carry a timestamp.
1345         msg->post();
1346         mDrainVideoQueuePending = true;
1347         return;
1348     }
1349 
1350     int64_t nowUs = ALooper::GetNowUs();
1351     if (mFlags & FLAG_REAL_TIME) {
1352         int64_t realTimeUs;
1353         CHECK(entry.mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1354 
1355         realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1356 
1357         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1358 
1359         int64_t delayUs = realTimeUs - nowUs;
1360 
1361         ALOGW_IF(delayUs > 500000, "unusually high delayUs: %lld", (long long)delayUs);
1362         // post 2 display refreshes before rendering is due
1363         msg->post(delayUs > twoVsyncsUs ? delayUs - twoVsyncsUs : 0);
1364 
1365         mDrainVideoQueuePending = true;
1366         return;
1367     }
1368 
1369     int64_t mediaTimeUs;
1370     CHECK(entry.mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1371 
1372     {
1373         Mutex::Autolock autoLock(mLock);
1374         if (mNeedVideoClearAnchor && !mHasAudio) {
1375             mNeedVideoClearAnchor = false;
1376             clearAnchorTime();
1377         }
1378         if (mAnchorTimeMediaUs < 0) {
1379             mMediaClock->updateAnchor(mediaTimeUs, nowUs, mediaTimeUs);
1380             mAnchorTimeMediaUs = mediaTimeUs;
1381         }
1382     }
1383     mNextVideoTimeMediaUs = mediaTimeUs;
1384     if (!mHasAudio) {
1385         // smooth out videos >= 10fps
1386         mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1387     }
1388 
1389     if (!mVideoSampleReceived || mediaTimeUs < mAudioFirstAnchorTimeMediaUs) {
1390         msg->post();
1391     } else {
1392         int64_t twoVsyncsUs = 2 * (mVideoScheduler->getVsyncPeriod() / 1000);
1393 
1394         // post 2 display refreshes before rendering is due
1395         mMediaClock->addTimer(msg, mediaTimeUs, -twoVsyncsUs);
1396     }
1397 
1398     mDrainVideoQueuePending = true;
1399 }
1400 
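// Render or drop the frame at the head of the video queue: compute its lateness against the
// clock and hand it back to the decoder with a "render" flag and target timestamp.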
1401 void NuPlayer::Renderer::onDrainVideoQueue() {
1402     if (mVideoQueue.empty()) {
1403         return;
1404     }
1405 
1406     QueueEntry *entry = &*mVideoQueue.begin();
1407 
1408     if (entry->mBuffer == NULL) {
1409         // EOS
1410 
1411         notifyEOS(false /* audio */, entry->mFinalResult);
1412 
1413         mVideoQueue.erase(mVideoQueue.begin());
1414         entry = NULL;
1415 
1416         setVideoLateByUs(0);
1417         return;
1418     }
1419 
1420     int64_t nowUs = ALooper::GetNowUs();
1421     int64_t realTimeUs;
1422     int64_t mediaTimeUs = -1;
1423     if (mFlags & FLAG_REAL_TIME) {
1424         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &realTimeUs));
1425     } else {
1426         CHECK(entry->mBuffer->meta()->findInt64("timeUs", &mediaTimeUs));
1427 
1428         realTimeUs = getRealTimeUs(mediaTimeUs, nowUs);
1429     }
1430     realTimeUs = mVideoScheduler->schedule(realTimeUs * 1000) / 1000;
1431 
1432     bool tooLate = false;
1433 
1434     if (!mPaused) {
1435         setVideoLateByUs(nowUs - realTimeUs);
1436         tooLate = (mVideoLateByUs > 40000);
1437 
1438         if (tooLate) {
1439             ALOGV("video late by %lld us (%.2f secs)",
1440                  (long long)mVideoLateByUs, mVideoLateByUs / 1E6);
1441         } else {
1442             int64_t mediaUs = 0;
1443             mMediaClock->getMediaTime(realTimeUs, &mediaUs);
1444             ALOGV("rendering video at media time %.2f secs",
1445                     (mFlags & FLAG_REAL_TIME ? realTimeUs :
1446                     mediaUs) / 1E6);
1447 
1448             if (!(mFlags & FLAG_REAL_TIME)
1449                     && mLastAudioMediaTimeUs != -1
1450                     && mediaTimeUs > mLastAudioMediaTimeUs) {
1451                 // If audio ends before video, video continues to drive media clock.
1452                 // Also smooth out videos >= 10fps.
1453                 mMediaClock->updateMaxTimeMedia(mediaTimeUs + kDefaultVideoFrameIntervalUs);
1454             }
1455         }
1456     } else {
1457         setVideoLateByUs(0);
1458         if (!mVideoSampleReceived && !mHasAudio) {
1459             // This ensures that the first frame after a flush won't be used as an anchor
1460             // while the renderer is paused, because resume can happen any time after a seek.
1461             clearAnchorTime();
1462         }
1463     }
1464 
1465     // Always render the first video frame while keeping stats on A/V sync.
1466     if (!mVideoSampleReceived) {
1467         realTimeUs = nowUs;
1468         tooLate = false;
1469     }
1470 
1471     entry->mNotifyConsumed->setInt64("timestampNs", realTimeUs * 1000LL);
1472     entry->mNotifyConsumed->setInt32("render", !tooLate);
1473     entry->mNotifyConsumed->post();
1474     mVideoQueue.erase(mVideoQueue.begin());
1475     entry = NULL;
1476 
1477     mVideoSampleReceived = true;
1478 
1479     if (!mPaused) {
1480         if (!mVideoRenderingStarted) {
1481             mVideoRenderingStarted = true;
1482             notifyVideoRenderingStart();
1483         }
1484         Mutex::Autolock autoLock(mLock);
1485         notifyIfMediaRenderingStarted_l();
1486     }
1487 }
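
/*
 * Lateness bookkeeping in onDrainVideoQueue(), as a minimal sketch (the 40 ms
 * threshold and the subtraction are from the code above; the sample values are
 * illustrative only):
 *
 *   int64_t lateByUs = nowUs - realTimeUs;   // positive => frame is behind schedule
 *   bool tooLate = (lateByUs > 40000);       // more than 40 ms late
 *   // e.g. nowUs = 1000050000, realTimeUs = 1000000000 -> 50 ms late
 *
 * A too-late frame is still returned via notifyConsumed, but with "render" set to
 * false so it is dropped; the first frame after start/flush is always rendered.
 */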
1488 
1489 void NuPlayer::Renderer::notifyVideoRenderingStart() {
1490     sp<AMessage> notify = mNotify->dup();
1491     notify->setInt32("what", kWhatVideoRenderingStart);
1492     notify->post();
1493 }
1494 
1495 void NuPlayer::Renderer::notifyEOS(bool audio, status_t finalResult, int64_t delayUs) {
1496     Mutex::Autolock autoLock(mLock);
1497     notifyEOS_l(audio, finalResult, delayUs);
1498 }
1499 
1500 void NuPlayer::Renderer::notifyEOS_l(bool audio, status_t finalResult, int64_t delayUs) {
1501     if (audio && delayUs > 0) {
1502         sp<AMessage> msg = new AMessage(kWhatEOS, this);
1503         msg->setInt32("audioEOSGeneration", mAudioEOSGeneration);
1504         msg->setInt32("finalResult", finalResult);
1505         msg->post(delayUs);
1506         return;
1507     }
1508     sp<AMessage> notify = mNotify->dup();
1509     notify->setInt32("what", kWhatEOS);
1510     notify->setInt32("audio", static_cast<int32_t>(audio));
1511     notify->setInt32("finalResult", finalResult);
1512     notify->post(delayUs);
1513 
1514     if (audio) {
1515         // Video might outlive audio. Clear the anchor to enable the video-only case.
1516         mAnchorTimeMediaUs = -1;
1517         mHasAudio = false;
1518         if (mNextVideoTimeMediaUs >= 0) {
1519             int64_t mediaUs = 0;
1520             int64_t nowUs = ALooper::GetNowUs();
1521             status_t result = mMediaClock->getMediaTime(nowUs, &mediaUs);
1522             if (result == OK) {
1523                 if (mNextVideoTimeMediaUs > mediaUs) {
1524                     mMediaClock->updateMaxTimeMedia(mNextVideoTimeMediaUs);
1525                 }
1526             } else {
1527                 mMediaClock->updateAnchor(
1528                         mNextVideoTimeMediaUs, nowUs,
1529                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1530             }
1531         }
1532     } else {
1533         mHasVideo = false;
1534     }
1535 }
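
/*
 * When audio reaches EOS before video, notifyEOS_l() above hands the media clock
 * over to video: the anchor is cleared and, if a next video timestamp is known,
 * either the max media time is advanced or a fresh anchor is seeded at
 * mNextVideoTimeMediaUs. This is what lets a video-only tail keep playing at the
 * correct rate after the audio track ends.
 */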
1536 
1537 void NuPlayer::Renderer::notifyAudioTearDown(AudioTearDownReason reason) {
1538     sp<AMessage> msg = new AMessage(kWhatAudioTearDown, this);
1539     msg->setInt32("reason", reason);
1540     msg->post();
1541 }
1542 
1543 void NuPlayer::Renderer::onQueueBuffer(const sp<AMessage> &msg) {
1544     int32_t audio;
1545     CHECK(msg->findInt32("audio", &audio));
1546 
1547     if (dropBufferIfStale(audio, msg)) {
1548         return;
1549     }
1550 
1551     if (audio) {
1552         mHasAudio = true;
1553     } else {
1554         mHasVideo = true;
1555     }
1556 
1557     if (mHasVideo) {
1558         if (mVideoScheduler == NULL) {
1559             mVideoScheduler = new VideoFrameScheduler();
1560             mVideoScheduler->init();
1561         }
1562     }
1563 
1564     sp<RefBase> obj;
1565     CHECK(msg->findObject("buffer", &obj));
1566     sp<MediaCodecBuffer> buffer = static_cast<MediaCodecBuffer *>(obj.get());
1567 
1568     sp<AMessage> notifyConsumed;
1569     CHECK(msg->findMessage("notifyConsumed", &notifyConsumed));
1570 
1571     QueueEntry entry;
1572     entry.mBuffer = buffer;
1573     entry.mNotifyConsumed = notifyConsumed;
1574     entry.mOffset = 0;
1575     entry.mFinalResult = OK;
1576     entry.mBufferOrdinal = ++mTotalBuffersQueued;
1577 
1578     if (audio) {
1579         Mutex::Autolock autoLock(mLock);
1580         mAudioQueue.push_back(entry);
1581         postDrainAudioQueue_l();
1582     } else {
1583         mVideoQueue.push_back(entry);
1584         postDrainVideoQueue();
1585     }
1586 
1587     Mutex::Autolock autoLock(mLock);
1588     if (!mSyncQueues || mAudioQueue.empty() || mVideoQueue.empty()) {
1589         return;
1590     }
1591 
1592     sp<MediaCodecBuffer> firstAudioBuffer = (*mAudioQueue.begin()).mBuffer;
1593     sp<MediaCodecBuffer> firstVideoBuffer = (*mVideoQueue.begin()).mBuffer;
1594 
1595     if (firstAudioBuffer == NULL || firstVideoBuffer == NULL) {
1596         // EOS signalled on either queue.
1597         syncQueuesDone_l();
1598         return;
1599     }
1600 
1601     int64_t firstAudioTimeUs;
1602     int64_t firstVideoTimeUs;
1603     CHECK(firstAudioBuffer->meta()
1604             ->findInt64("timeUs", &firstAudioTimeUs));
1605     CHECK(firstVideoBuffer->meta()
1606             ->findInt64("timeUs", &firstVideoTimeUs));
1607 
1608     int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
1609 
1610     ALOGV("queueDiff = %.2f secs", diff / 1E6);
1611 
1612     if (diff > 100000LL) {
1613         // Audio data starts more than 0.1 secs before video.
1614         // Drop some audio.
1615 
1616         (*mAudioQueue.begin()).mNotifyConsumed->post();
1617         mAudioQueue.erase(mAudioQueue.begin());
1618         return;
1619     }
1620 
1621     syncQueuesDone_l();
1622 }
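
/*
 * Initial A/V queue sync in onQueueBuffer(), shown as a minimal sketch (the
 * 100 ms threshold is from the code above; timestamps are illustrative):
 *
 *   int64_t diff = firstVideoTimeUs - firstAudioTimeUs;
 *   if (diff > 100000LL) {
 *       // audio leads video by more than 0.1 s: return (drop) the first audio
 *       // buffer via its notifyConsumed message and keep syncing
 *   } else {
 *       // queues are close enough: stop syncing and start draining both
 *   }
 */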
1623 
1624 void NuPlayer::Renderer::syncQueuesDone_l() {
1625     if (!mSyncQueues) {
1626         return;
1627     }
1628 
1629     mSyncQueues = false;
1630 
1631     if (!mAudioQueue.empty()) {
1632         postDrainAudioQueue_l();
1633     }
1634 
1635     if (!mVideoQueue.empty()) {
1636         mLock.unlock();
1637         postDrainVideoQueue();
1638         mLock.lock();
1639     }
1640 }
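
/*
 * syncQueuesDone_l() is called with mLock held, so it must drop the lock around
 * postDrainVideoQueue(): that function takes mLock itself (both directly and via
 * getSyncQueues()), and android::Mutex is not recursive.
 */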
1641 
1642 void NuPlayer::Renderer::onQueueEOS(const sp<AMessage> &msg) {
1643     int32_t audio;
1644     CHECK(msg->findInt32("audio", &audio));
1645 
1646     if (dropBufferIfStale(audio, msg)) {
1647         return;
1648     }
1649 
1650     int32_t finalResult;
1651     CHECK(msg->findInt32("finalResult", &finalResult));
1652 
1653     QueueEntry entry;
1654     entry.mOffset = 0;
1655     entry.mFinalResult = finalResult;
1656 
1657     if (audio) {
1658         Mutex::Autolock autoLock(mLock);
1659         if (mAudioQueue.empty() && mSyncQueues) {
1660             syncQueuesDone_l();
1661         }
1662         mAudioQueue.push_back(entry);
1663         postDrainAudioQueue_l();
1664     } else {
1665         if (mVideoQueue.empty() && getSyncQueues()) {
1666             Mutex::Autolock autoLock(mLock);
1667             syncQueuesDone_l();
1668         }
1669         mVideoQueue.push_back(entry);
1670         postDrainVideoQueue();
1671     }
1672 }
1673 
1674 void NuPlayer::Renderer::onFlush(const sp<AMessage> &msg) {
1675     int32_t audio, notifyComplete;
1676     CHECK(msg->findInt32("audio", &audio));
1677 
1678     {
1679         Mutex::Autolock autoLock(mLock);
1680         if (audio) {
1681             notifyComplete = mNotifyCompleteAudio;
1682             mNotifyCompleteAudio = false;
1683             mLastAudioMediaTimeUs = -1;
1684 
1685             mHasAudio = false;
1686             if (mNextVideoTimeMediaUs >= 0) {
1687                 int64_t nowUs = ALooper::GetNowUs();
1688                 mMediaClock->updateAnchor(
1689                         mNextVideoTimeMediaUs, nowUs,
1690                         mNextVideoTimeMediaUs + kDefaultVideoFrameIntervalUs);
1691             }
1692         } else {
1693             notifyComplete = mNotifyCompleteVideo;
1694             mNotifyCompleteVideo = false;
1695             mHasVideo = false;
1696         }
1697 
1698         // If we're currently syncing the queues, i.e. dropping audio while
1699         // aligning the first audio/video buffer times and only one of the
1700         // two queues has data, we may starve that queue by not requesting
1701         // more buffers from the decoder. If the other source then encounters
1702         // a discontinuity that leads to flushing, we'll never find the
1703         // corresponding discontinuity on the other queue.
1704         // Therefore we'll stop syncing the queues if at least one of them
1705         // is flushed.
1706         syncQueuesDone_l();
1707     }
1708 
1709     if (audio && mDrainVideoQueuePending) {
1710         // Audio should not clear the anchor (MediaClock) directly, because the video
1711         // path's postDrainVideoQueue() registers a kWhatDrainVideoQueue message as a
1712         // MediaClock timer; clearing the anchor without immediately updating it may
1713         // block that message from posting. So postpone the clear to the video path,
1714         // which can update the anchor right after clearing it.
1715         mNeedVideoClearAnchor = true;
1716     } else {
1717         clearAnchorTime();
1718     }
1719 
1720     ALOGV("flushing %s", audio ? "audio" : "video");
1721     if (audio) {
1722         {
1723             Mutex::Autolock autoLock(mLock);
1724             flushQueue(&mAudioQueue);
1725 
1726             ++mAudioDrainGeneration;
1727             ++mAudioEOSGeneration;
1728             prepareForMediaRenderingStart_l();
1729 
1730             // the frame count will be reset after flush.
1731             clearAudioFirstAnchorTime_l();
1732         }
1733 
1734         mDrainAudioQueuePending = false;
1735 
1736         mAudioSink->pause();
1737         mAudioSink->flush();
1738         if (!offloadingAudio()) {
1739             // Call stop() to signal to the AudioSink to completely fill the
1740             // internal buffer before resuming playback.
1741             // FIXME: this is ignored after flush().
1742             mAudioSink->stop();
1743             mNumFramesWritten = 0;
1744         }
1745         if (!mPaused) {
1746             mAudioSink->start();
1747         }
1748         mNextAudioClockUpdateTimeUs = -1;
1749     } else {
1750         flushQueue(&mVideoQueue);
1751 
1752         mDrainVideoQueuePending = false;
1753 
1754         if (mVideoScheduler != NULL) {
1755             mVideoScheduler->restart();
1756         }
1757 
1758         Mutex::Autolock autoLock(mLock);
1759         ++mVideoDrainGeneration;
1760         prepareForMediaRenderingStart_l();
1761     }
1762 
1763     mVideoSampleReceived = false;
1764 
1765     if (notifyComplete) {
1766         notifyFlushComplete(audio);
1767     }
1768 }
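
/*
 * Flush sequencing for the audio path above: the sink is paused and flushed, and
 * for non-offloaded (PCM) playback stop() is also called so the sink refills its
 * internal buffer before playback resumes; the written-frame counter is reset at
 * the same time so the anchor math starts over. The sink is restarted only if the
 * renderer is not paused.
 */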
1769 
1770 void NuPlayer::Renderer::flushQueue(List<QueueEntry> *queue) {
1771     while (!queue->empty()) {
1772         QueueEntry *entry = &*queue->begin();
1773 
1774         if (entry->mBuffer != NULL) {
1775             entry->mNotifyConsumed->post();
1776         } else if (entry->mNotifyConsumed != nullptr) {
1777             // Does the audio sink need to be opened now?
1778             onChangeAudioFormat(entry->mMeta, entry->mNotifyConsumed);
1779         }
1780 
1781         queue->erase(queue->begin());
1782         entry = NULL;
1783     }
1784 }
1785 
1786 void NuPlayer::Renderer::notifyFlushComplete(bool audio) {
1787     sp<AMessage> notify = mNotify->dup();
1788     notify->setInt32("what", kWhatFlushComplete);
1789     notify->setInt32("audio", static_cast<int32_t>(audio));
1790     notify->post();
1791 }
1792 
1793 bool NuPlayer::Renderer::dropBufferIfStale(
1794         bool audio, const sp<AMessage> &msg) {
1795     int32_t queueGeneration;
1796     CHECK(msg->findInt32("queueGeneration", &queueGeneration));
1797 
1798     if (queueGeneration == getQueueGeneration(audio)) {
1799         return false;
1800     }
1801 
1802     sp<AMessage> notifyConsumed;
1803     if (msg->findMessage("notifyConsumed", &notifyConsumed)) {
1804         notifyConsumed->post();
1805     }
1806 
1807     return true;
1808 }
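
/*
 * dropBufferIfStale() implements the usual generation-counter pattern: every flush
 * bumps the queue generation, and any queued message carrying an older
 * "queueGeneration" is discarded (its buffer is returned via notifyConsumed).
 * A minimal sketch of the producer side, under the assumption that the sender
 * stamps the message when queueing (names mirror the fields checked above):
 *
 *   sp<AMessage> msg = new AMessage(kWhatQueueBuffer, this);
 *   msg->setInt32("queueGeneration", getQueueGeneration(audio));
 *   msg->setInt32("audio", audio);
 *   msg->post();
 */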
1809 
1810 void NuPlayer::Renderer::onAudioSinkChanged() {
1811     if (offloadingAudio()) {
1812         return;
1813     }
1814     CHECK(!mDrainAudioQueuePending);
1815     mNumFramesWritten = 0;
1816     mAnchorNumFramesWritten = -1;
1817     uint32_t written;
1818     if (mAudioSink->getFramesWritten(&written) == OK) {
1819         mNumFramesWritten = written;
1820     }
1821 }
1822 
1823 void NuPlayer::Renderer::onDisableOffloadAudio() {
1824     Mutex::Autolock autoLock(mLock);
1825     mFlags &= ~FLAG_OFFLOAD_AUDIO;
1826     ++mAudioDrainGeneration;
1827     if (mAudioRenderingStartGeneration != -1) {
1828         prepareForMediaRenderingStart_l();
1829         // PauseTimeout is applied to offload mode only. Cancel pending timer.
1830         cancelAudioOffloadPauseTimeout();
1831     }
1832 }
1833 
1834 void NuPlayer::Renderer::onEnableOffloadAudio() {
1835     Mutex::Autolock autoLock(mLock);
1836     mFlags |= FLAG_OFFLOAD_AUDIO;
1837     ++mAudioDrainGeneration;
1838     if (mAudioRenderingStartGeneration != -1) {
1839         prepareForMediaRenderingStart_l();
1840     }
1841 }
1842 
1843 void NuPlayer::Renderer::onPause() {
1844     if (mPaused) {
1845         return;
1846     }
1847 
1848     startAudioOffloadPauseTimeout();
1849 
1850     {
1851         Mutex::Autolock autoLock(mLock);
1852         // We do not increment the audio drain generation so that the audio buffer keeps filling during pause.
1853         ++mVideoDrainGeneration;
1854         prepareForMediaRenderingStart_l();
1855         mPaused = true;
1856         mMediaClock->setPlaybackRate(0.0);
1857     }
1858 
1859     mDrainAudioQueuePending = false;
1860     mDrainVideoQueuePending = false;
1861 
1862     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1863     mAudioSink->pause();
1864 
1865     ALOGV("now paused audio queue has %zu entries, video has %zu entries",
1866           mAudioQueue.size(), mVideoQueue.size());
1867 }
1868 
1869 void NuPlayer::Renderer::onResume() {
1870     if (!mPaused) {
1871         return;
1872     }
1873 
1874     // Note: audio data may not have been decoded, and the AudioSink may not be opened.
1875     cancelAudioOffloadPauseTimeout();
1876     if (mAudioSink->ready()) {
1877         status_t err = mAudioSink->start();
1878         if (err != OK) {
1879             ALOGE("cannot start AudioSink err %d", err);
1880             notifyAudioTearDown(kDueToError);
1881         }
1882     }
1883 
1884     {
1885         Mutex::Autolock autoLock(mLock);
1886         mPaused = false;
1887         // rendering started message may have been delayed if we were paused.
1888         // The rendering-started message may have been delayed if we were paused.
1889             notifyIfMediaRenderingStarted_l();
1890         }
1891         // Configure the AudioSink, as we did not do so when pausing.
1892         if (mAudioSink != NULL && mAudioSink->ready()) {
1893             mAudioSink->setPlaybackRate(mPlaybackSettings);
1894         }
1895 
1896         mMediaClock->setPlaybackRate(mPlaybackRate);
1897 
1898         if (!mAudioQueue.empty()) {
1899             postDrainAudioQueue_l();
1900         }
1901     }
1902 
1903     if (!mVideoQueue.empty()) {
1904         postDrainVideoQueue();
1905     }
1906 }
1907 
1908 void NuPlayer::Renderer::onSetVideoFrameRate(float fps) {
1909     if (mVideoScheduler == NULL) {
1910         mVideoScheduler = new VideoFrameScheduler();
1911     }
1912     mVideoScheduler->init(fps);
1913 }
1914 
1915 int32_t NuPlayer::Renderer::getQueueGeneration(bool audio) {
1916     Mutex::Autolock autoLock(mLock);
1917     return (audio ? mAudioQueueGeneration : mVideoQueueGeneration);
1918 }
1919 
1920 int32_t NuPlayer::Renderer::getDrainGeneration(bool audio) {
1921     Mutex::Autolock autoLock(mLock);
1922     return (audio ? mAudioDrainGeneration : mVideoDrainGeneration);
1923 }
1924 
1925 bool NuPlayer::Renderer::getSyncQueues() {
1926     Mutex::Autolock autoLock(mLock);
1927     return mSyncQueues;
1928 }
1929 
1930 void NuPlayer::Renderer::onAudioTearDown(AudioTearDownReason reason) {
1931     if (mAudioTornDown) {
1932         return;
1933     }
1934 
1935     // TimeoutWhenPaused is only for offload mode.
1936     if (reason == kDueToTimeout && !offloadingAudio()) {
1937         return;
1938     }
1939 
1940     mAudioTornDown = true;
1941 
1942     int64_t currentPositionUs;
1943     sp<AMessage> notify = mNotify->dup();
1944     if (getCurrentPosition(&currentPositionUs) == OK) {
1945         notify->setInt64("positionUs", currentPositionUs);
1946     }
1947 
1948     mAudioSink->stop();
1949     mAudioSink->flush();
1950 
1951     notify->setInt32("what", kWhatAudioTearDown);
1952     notify->setInt32("reason", reason);
1953     notify->post();
1954 }
1955 
1956 void NuPlayer::Renderer::startAudioOffloadPauseTimeout() {
1957     if (offloadingAudio()) {
1958         mWakeLock->acquire();
1959         mWakelockAcquireEvent.updateValues(uptimeMillis(),
1960                                            mAudioOffloadPauseTimeoutGeneration,
1961                                            mAudioOffloadPauseTimeoutGeneration);
1962         sp<AMessage> msg = new AMessage(kWhatAudioOffloadPauseTimeout, this);
1963         msg->setInt32("drainGeneration", mAudioOffloadPauseTimeoutGeneration);
1964         msg->post(kOffloadPauseMaxUs);
1965     }
1966 }
1967 
1968 void NuPlayer::Renderer::cancelAudioOffloadPauseTimeout() {
1969     // We may have called startAudioOffloadPauseTimeout() without
1970     // the AudioSink open and with offloadingAudio enabled.
1971     //
1972     // When we cancel, it may be that offloadingAudio is subsequently disabled, so regardless
1973     // we always release the wakelock and increment the pause timeout generation.
1974     //
1975     // Note: The acquired wakelock prevents the device from suspending
1976     // immediately after offload pause (in case a resume happens shortly thereafter).
1977     mWakeLock->release(true);
1978     mWakelockCancelEvent.updateValues(uptimeMillis(),
1979                                       mAudioOffloadPauseTimeoutGeneration,
1980                                       mAudioOffloadPauseTimeoutGeneration);
1981     ++mAudioOffloadPauseTimeoutGeneration;
1982 }
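
/*
 * Offload pause timeout lifecycle: onPause() starts the timer via
 * startAudioOffloadPauseTimeout(), which holds a wake lock and posts
 * kWhatAudioOffloadPauseTimeout after kOffloadPauseMaxUs (10 s). If the player
 * resumes first, cancelAudioOffloadPauseTimeout() releases the wake lock and bumps
 * the generation so the stale timeout message is ignored; otherwise the timeout
 * leads to an audio teardown (kDueToTimeout, offload mode only) so the audio DSP
 * can power down.
 */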
1983 
1984 status_t NuPlayer::Renderer::onOpenAudioSink(
1985         const sp<AMessage> &format,
1986         bool offloadOnly,
1987         bool hasVideo,
1988         uint32_t flags,
1989         bool isStreaming) {
1990     ALOGV("openAudioSink: offloadOnly(%d) offloadingAudio(%d)",
1991             offloadOnly, offloadingAudio());
1992     ATRACE_BEGIN(StringPrintf("NuPlayer::Renderer::onOpenAudioSink: offloadOnly(%d) "
1993             "offloadingAudio(%d)", offloadOnly, offloadingAudio()).c_str());
1994     bool audioSinkChanged = false;
1995 
1996     int32_t numChannels;
1997     CHECK(format->findInt32("channel-count", &numChannels));
1998 
1999     // channel mask info as read from the audio format
2000     int32_t mediaFormatChannelMask;
2001     // channel mask to use for native playback
2002     audio_channel_mask_t channelMask;
2003     if (format->findInt32("channel-mask", &mediaFormatChannelMask)) {
2004         // KEY_CHANNEL_MASK follows the android.media.AudioFormat java mask
2005         channelMask = audio_channel_mask_from_media_format_mask(mediaFormatChannelMask);
2006     } else {
2007         // no mask found: the mask will be derived from the channel count
2008         channelMask = CHANNEL_MASK_USE_CHANNEL_ORDER;
2009     }
2010 
2011     int32_t sampleRate;
2012     CHECK(format->findInt32("sample-rate", &sampleRate));
2013 
2014     // read pcm encoding from MediaCodec output format, if available
2015     int32_t pcmEncoding;
2016     audio_format_t audioFormat =
2017             format->findInt32(KEY_PCM_ENCODING, &pcmEncoding) ?
2018                     audioFormatFromEncoding(pcmEncoding) : AUDIO_FORMAT_PCM_16_BIT;
2019 
2020     if (offloadingAudio()) {
2021         AString mime;
2022         CHECK(format->findString("mime", &mime));
2023         status_t err = OK;
2024         if (audioFormat == AUDIO_FORMAT_PCM_16_BIT) {
2025             // The default format most likely means there was no pcm-encoding entry in the
2026             // format message; try to derive the format from the mime type instead.
2027             err = mapMimeToAudioFormat(audioFormat, mime.c_str());
2028         }
2029 
2030         if (err != OK) {
2031             ALOGE("Couldn't map mime \"%s\" to a valid "
2032                     "audio_format", mime.c_str());
2033             onDisableOffloadAudio();
2034         } else {
2035             ALOGV("Mime \"%s\" mapped to audio_format 0x%x",
2036                     mime.c_str(), audioFormat);
2037 
2038             int avgBitRate = 0;
2039             format->findInt32("bitrate", &avgBitRate);
2040 
2041             int32_t aacProfile = -1;
2042             if (audioFormat == AUDIO_FORMAT_AAC
2043                     && format->findInt32("aac-profile", &aacProfile)) {
2044                 // Refine the AAC format according to the AAC profile.
2045                 mapAACProfileToAudioFormat(
2046                         audioFormat,
2047                         aacProfile);
2048             }
2049 
2050             audio_offload_info_t offloadInfo = AUDIO_INFO_INITIALIZER;
2051             offloadInfo.duration_us = -1;
2052             format->findInt64(
2053                     "durationUs", &offloadInfo.duration_us);
2054             offloadInfo.sample_rate = sampleRate;
2055             offloadInfo.channel_mask = channelMask;
2056             offloadInfo.format = audioFormat;
2057             offloadInfo.stream_type = AUDIO_STREAM_MUSIC;
2058             offloadInfo.bit_rate = avgBitRate;
2059             offloadInfo.has_video = hasVideo;
2060             offloadInfo.is_streaming = isStreaming;
2061 
2062             if (memcmp(&mCurrentOffloadInfo, &offloadInfo, sizeof(offloadInfo)) == 0) {
2063                 ALOGV("openAudioSink: no change in offload mode");
2064                 // no change from previous configuration, everything ok.
2065                 ATRACE_END();
2066                 return OK;
2067             }
2068             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2069 
2070             ALOGV("openAudioSink: try to open AudioSink in offload mode");
2071             uint32_t offloadFlags = flags;
2072             offloadFlags |= AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2073             offloadFlags &= ~AUDIO_OUTPUT_FLAG_DEEP_BUFFER;
2074             audioSinkChanged = true;
2075             mAudioSink->close();
2076 
2077             err = mAudioSink->open(
2078                     sampleRate,
2079                     numChannels,
2080                     (audio_channel_mask_t)channelMask,
2081                     audioFormat,
2082                     0 /* bufferCount - unused */,
2083                     &NuPlayer::Renderer::AudioSinkCallback,
2084                     this,
2085                     (audio_output_flags_t)offloadFlags,
2086                     &offloadInfo);
2087 
2088             if (err == OK) {
2089                 err = mAudioSink->setPlaybackRate(mPlaybackSettings);
2090             }
2091 
2092             if (err == OK) {
2093                 // If the playback is offloaded to h/w, we pass
2094                 // the HAL some metadata information.
2095                 // We don't want to do this for PCM because it
2096                 // will be going through the AudioFlinger mixer
2097                 // before reaching the hardware.
2098                 // TODO
2099                 mCurrentOffloadInfo = offloadInfo;
2100                 if (!mPaused) { // for preview mode, don't start if paused
2101                     err = mAudioSink->start();
2102                 }
2103                 ALOGV_IF(err == OK, "openAudioSink: offload succeeded");
2104             }
2105             if (err != OK) {
2106                 // Clean up, fall back to non offload mode.
2107                 mAudioSink->close();
2108                 onDisableOffloadAudio();
2109                 mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2110                 ALOGV("openAudioSink: offload failed");
2111                 if (offloadOnly) {
2112                     notifyAudioTearDown(kForceNonOffload);
2113                 }
2114             } else {
2115                 mUseAudioCallback = true;  // offload mode transfers data through callback
2116                 ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2117             }
2118         }
2119     }
2120     if (!offloadOnly && !offloadingAudio()) {
2121         ALOGV("openAudioSink: open AudioSink in NON-offload mode");
2122         uint32_t pcmFlags = flags;
2123         pcmFlags &= ~AUDIO_OUTPUT_FLAG_COMPRESS_OFFLOAD;
2124 
2125         const PcmInfo info = {
2126                 (audio_channel_mask_t)channelMask,
2127                 (audio_output_flags_t)pcmFlags,
2128                 audioFormat,
2129                 numChannels,
2130                 sampleRate
2131         };
2132         if (memcmp(&mCurrentPcmInfo, &info, sizeof(info)) == 0) {
2133             ALOGV("openAudioSink: no change in pcm mode");
2134             // no change from previous configuration, everything ok.
2135             ATRACE_END();
2136             return OK;
2137         }
2138 
2139         audioSinkChanged = true;
2140         mAudioSink->close();
2141         mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2142         // Note: It is possible to set up the callback, but not use it to send audio data.
2143         // This requires a fix in AudioSink to explicitly specify the transfer mode.
2144         mUseAudioCallback = getUseAudioCallbackSetting();
2145         if (mUseAudioCallback) {
2146             ++mAudioDrainGeneration;  // discard pending kWhatDrainAudioQueue message.
2147         }
2148 
2149         // Compute the desired buffer size.
2150         // For callback mode, the amount of time before wakeup is about half the buffer size.
2151         const uint32_t frameCount =
2152                 (unsigned long long)sampleRate * getAudioSinkPcmMsSetting() / 1000;
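        // For example, with the default 500 ms sink setting and a 48 kHz stream this
        // requests 48000 * 500 / 1000 = 24000 frames (values are illustrative; the
        // actual setting comes from the media.stagefright.audio.sink property).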
2153 
2154         // Setting doNotReconnect means the AudioSink signals back and lets NuPlayer
2155         // re-construct the AudioSink. We don't want this when there's video because it
2156         // would cause a video seek to the previous I-frame. But we do want it when there's
2157         // only audio because it gives NuPlayer a chance to switch from non-offload mode
2158         // to offload mode. So we only set doNotReconnect when there's no video.
2159         const bool doNotReconnect = !hasVideo;
2160 
2161         // We should always be able to set our playback settings if the sink is closed.
2162         LOG_ALWAYS_FATAL_IF(mAudioSink->setPlaybackRate(mPlaybackSettings) != OK,
2163                 "onOpenAudioSink: can't set playback rate on closed sink");
2164         status_t err = mAudioSink->open(
2165                     sampleRate,
2166                     numChannels,
2167                     (audio_channel_mask_t)channelMask,
2168                     audioFormat,
2169                     0 /* bufferCount - unused */,
2170                     mUseAudioCallback ? &NuPlayer::Renderer::AudioSinkCallback : NULL,
2171                     mUseAudioCallback ? this : NULL,
2172                     (audio_output_flags_t)pcmFlags,
2173                     NULL,
2174                     doNotReconnect,
2175                     frameCount);
2176         if (err != OK) {
2177             ALOGW("openAudioSink: non offloaded open failed status: %d", err);
2178             mAudioSink->close();
2179             mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2180             ATRACE_END();
2181             return err;
2182         }
2183         mCurrentPcmInfo = info;
2184         if (!mPaused) { // for preview mode, don't start if paused
2185             mAudioSink->start();
2186         }
2187     }
2188     if (audioSinkChanged) {
2189         onAudioSinkChanged();
2190     }
2191     mAudioTornDown = false;
2192     ATRACE_END();
2193     return OK;
2194 }
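
/*
 * Fallback flow in onOpenAudioSink(): an offload open is attempted first when
 * offloading is enabled; if open()/start() fails, the sink is closed, offload is
 * disabled (onDisableOffloadAudio) and, unless offloadOnly was requested, the
 * function falls through to the non-offloaded PCM open below the offload branch.
 * Offload mode always transfers data through the AudioSink callback, while PCM
 * mode uses the callback only when the media.stagefright.audio.cbk property is set.
 */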
2195 
2196 void NuPlayer::Renderer::onCloseAudioSink() {
2197     ATRACE_BEGIN("NuPlayer::Renderer::onCloseAudioSink");
2198     mAudioSink->close();
2199     mCurrentOffloadInfo = AUDIO_INFO_INITIALIZER;
2200     mCurrentPcmInfo = AUDIO_PCMINFO_INITIALIZER;
2201     ATRACE_END();
2202 }
2203 
2204 void NuPlayer::Renderer::onChangeAudioFormat(
2205         const sp<AMessage> &meta, const sp<AMessage> &notify) {
2206     sp<AMessage> format;
2207     CHECK(meta->findMessage("format", &format));
2208 
2209     int32_t offloadOnly;
2210     CHECK(meta->findInt32("offload-only", &offloadOnly));
2211 
2212     int32_t hasVideo;
2213     CHECK(meta->findInt32("has-video", &hasVideo));
2214 
2215     uint32_t flags;
2216     CHECK(meta->findInt32("flags", (int32_t *)&flags));
2217 
2218     uint32_t isStreaming;
2219     CHECK(meta->findInt32("isStreaming", (int32_t *)&isStreaming));
2220 
2221     status_t err = onOpenAudioSink(format, offloadOnly, hasVideo, flags, isStreaming);
2222 
2223     if (err != OK) {
2224         notify->setInt32("err", err);
2225     }
2226     notify->post();
2227 }
2228 
2229 void NuPlayer::Renderer::WakeLockEvent::dump(AString& logString) {
2230   logString.append("[");
2231   logString.append(mTimeMs);
2232   logString.append(",");
2233   logString.append(mEventTimeoutGeneration);
2234   logString.append(",");
2235   logString.append(mRendererTimeoutGeneration);
2236   logString.append("]");
2237 }
2238 
2239 }  // namespace android
2240