xref: /aosp_15_r20/cts/tests/tests/media/common/src/android/media/cts/MediaCodecTunneledPlayer.java (revision b7c941bb3fa97aba169d73cee0bed2de8ac964bf)
/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
16 package android.media.cts;
17 
18 import android.content.Context;
19 import android.media.AudioTimestamp;
20 import android.media.AudioTrack;
21 import android.media.MediaCodec;
22 import android.media.MediaCodecInfo;
23 import android.media.MediaCodecList;
24 import android.media.MediaExtractor;
25 import android.media.MediaFormat;
26 import android.net.Uri;
27 import android.util.Log;
28 import android.view.SurfaceHolder;
29 
30 import com.google.common.collect.ImmutableList;
31 
32 import java.io.IOException;
33 import java.util.HashMap;
34 import java.util.Map;
35 
/**
 * Android 5.0 (API 21) introduces the {@link MediaCodec} tunneled mode API.  It allows apps
 * to use MediaCodec to delegate their Audio/Video rendering to a vendor provided
 * Codec component.
 */
41 public class MediaCodecTunneledPlayer implements MediaTimeProvider {
42     private static final String TAG = MediaCodecTunneledPlayer.class.getSimpleName();
43 
44     /** State the player starts in, before configuration. */
45     private static final int STATE_IDLE = 1;
46     /** State of the player during initial configuration. */
47     private static final int STATE_PREPARED = 2;
48     /** State of the player after starting the codecs */
49     private static final int STATE_STARTED = 3;
50     /** State of the player during playback. */
51     private static final int STATE_PLAYING = 4;
52     /** State of the player when playback is paused. */
53     private static final int STATE_PAUSED = 5;
54 
55     private final Object mThreadStartedLock = new Object();
56     private boolean mThreadStarted = false;
57     private byte[] mSessionId;
58     private CodecState mAudioTrackState;
59     private int mMediaFormatHeight;
60     private int mMediaFormatWidth;
61     private Float mMediaFormatFrameRate;
62     private final Object mStateLock = new Object();
63     private int mState;
64     private long mDeltaTimeUs;
65     private long mDurationUs;
66     private Map<Integer, CodecState> mAudioCodecStates;
67     private Map<Integer, CodecState> mVideoCodecStates;
68     private Map<String, String> mAudioHeaders;
69     private Map<String, String> mVideoHeaders;
70     private MediaExtractor mAudioExtractor;
71     private MediaExtractor mVideoExtractor;
72     private SurfaceHolder mSurfaceHolder;
73     private Thread mThread;
74     private Uri mAudioUri;
75     private Uri mVideoUri;
76     private boolean mIsTunneled;
77     private int mAudioSessionId;
78     private Context mContext;
79 
80     /*
81      * Media player class to playback video using tunneled MediaCodec.
82      */
MediaCodecTunneledPlayer(Context context, SurfaceHolder holder, boolean tunneled, int AudioSessionId)83     public MediaCodecTunneledPlayer(Context context, SurfaceHolder holder, boolean tunneled, int AudioSessionId) {
84         mContext = context;
85         mSurfaceHolder = holder;
86         mIsTunneled = tunneled;
87         mAudioTrackState = null;
88         mState = STATE_IDLE;
89         mAudioSessionId = AudioSessionId;
90         mThread = new Thread(new Runnable() {
91             @Override
92             public void run() {
93                 while (true) {
94                     synchronized (mThreadStartedLock) {
95                         if (mThreadStarted == false) {
96                             break;
97                         }
98                     }
99                     synchronized (mStateLock) {
100                         if (mState == STATE_PLAYING) {
101                             doSomeWork();
102                             if (mAudioTrackState != null) {
103                                 mAudioTrackState.processAudioTrack();
104                             }
105                         }
106                     }
107                     try {
108                         Thread.sleep(5);
109                     } catch (InterruptedException ex) {
110                         Log.d(TAG, "Thread interrupted");
111                     }
112                 }
113             }
114         });
115     }
116 
setFrameRate(float frameRate)117     public void setFrameRate(float frameRate) {
118         mMediaFormatFrameRate = frameRate;
119     }
120 
setAudioDataSource(Uri uri, Map<String, String> headers)121     public void setAudioDataSource(Uri uri, Map<String, String> headers) {
122         mAudioUri = uri;
123         mAudioHeaders = headers;
124     }
125 
setVideoDataSource(Uri uri, Map<String, String> headers)126     public void setVideoDataSource(Uri uri, Map<String, String> headers) {
127         mVideoUri = uri;
128         mVideoHeaders = headers;
129     }
130 
getMediaFormatHeight()131     public final int getMediaFormatHeight() {
132         return mMediaFormatHeight;
133     }
134 
getMediaFormatWidth()135     public final int getMediaFormatWidth() {
136         return mMediaFormatWidth;
137     }
138 
prepareAudio()139     private boolean prepareAudio() throws IOException {
140         for (int i = mAudioExtractor.getTrackCount(); i-- > 0;) {
141             MediaFormat format = mAudioExtractor.getTrackFormat(i);
142             String mime = format.getString(MediaFormat.KEY_MIME);
143 
144             if (!mime.startsWith("audio/")) {
145                 continue;
146             }
147 
148             Log.d(TAG, "audio track #" + i + " " + format + " " + mime +
149                   " Is ADTS:" + getMediaFormatInteger(format, MediaFormat.KEY_IS_ADTS) +
150                   " Sample rate:" + getMediaFormatInteger(format, MediaFormat.KEY_SAMPLE_RATE) +
151                   " Channel count:" +
152                   getMediaFormatInteger(format, MediaFormat.KEY_CHANNEL_COUNT));
153 
154             mAudioExtractor.selectTrack(i);
155             if (!addTrack(i, format)) {
156                 Log.e(TAG, "prepareAudio - addTrack() failed!");
157                 return false;
158             }
159 
160             if (format.containsKey(MediaFormat.KEY_DURATION)) {
161                 long durationUs = format.getLong(MediaFormat.KEY_DURATION);
162 
163                 if (durationUs > mDurationUs) {
164                     mDurationUs = durationUs;
165                 }
166                 Log.d(TAG, "audio track format #" + i +
167                         " Duration:" + mDurationUs + " microseconds");
168             }
169         }
170         return true;
171     }
172 
prepareVideo()173     private boolean prepareVideo() throws IOException {
174         for (int i = mVideoExtractor.getTrackCount(); i-- > 0;) {
175             MediaFormat format = mVideoExtractor.getTrackFormat(i);
176             String mime = format.getString(MediaFormat.KEY_MIME);
177 
178             if (!mime.startsWith("video/")) {
179                 continue;
180             }
181 
182             mMediaFormatHeight = getMediaFormatInteger(format, MediaFormat.KEY_HEIGHT);
183             mMediaFormatWidth = getMediaFormatInteger(format, MediaFormat.KEY_WIDTH);
184             Log.d(TAG, "video track #" + i + " " + format + " " + mime +
185                   " Width:" + mMediaFormatWidth + ", Height:" + mMediaFormatHeight);
186 
187             mVideoExtractor.selectTrack(i);
188             if (!addTrack(i, format)) {
189                 Log.e(TAG, "prepareVideo - addTrack() failed!");
190                 return false;
191             }
192 
193             if (format.containsKey(MediaFormat.KEY_DURATION)) {
194                 long durationUs = format.getLong(MediaFormat.KEY_DURATION);
195 
196                 if (durationUs > mDurationUs) {
197                     mDurationUs = durationUs;
198                 }
199                 Log.d(TAG, "track format #" + i + " Duration:" +
200                         mDurationUs + " microseconds");
201             }
202         }
203         return true;
204     }
205 
206     // Creates the extractors, identifies tracks and formats, and then calls MediaCodec.configure
prepare()207     public boolean prepare() throws IOException {
208         if (mState != STATE_IDLE) {
209             throw new IllegalStateException("Expected STATE_IDLE, got " + mState);
210         }
211 
212         if (null == mAudioExtractor) {
213             mAudioExtractor = new MediaExtractor();
214             if (null == mAudioExtractor) {
215                 Log.e(TAG, "prepare - Cannot create Audio extractor.");
216                 return false;
217             }
218         }
219 
220         if (null == mVideoExtractor){
221             mVideoExtractor = new MediaExtractor();
222             if (null == mVideoExtractor) {
223                 Log.e(TAG, "prepare - Cannot create Video extractor.");
224                 return false;
225             }
226         }
227 
228         mAudioExtractor.setDataSource(mContext, mAudioUri, mAudioHeaders);
229         if (mVideoUri != null) {
230             mVideoExtractor.setDataSource(mContext, mVideoUri, mVideoHeaders);
231         }
232 
233         if (null == mVideoCodecStates) {
234             mVideoCodecStates = new HashMap<Integer, CodecState>();
235         } else {
236             mVideoCodecStates.clear();
237         }
238 
239         if (null == mAudioCodecStates) {
240             mAudioCodecStates = new HashMap<Integer, CodecState>();
241         } else {
242             mAudioCodecStates.clear();
243         }
244 
245         if (!prepareAudio()) {
246             Log.e(TAG,"prepare - prepareAudio() failed!");
247             return false;
248         }
249         if (!prepareVideo()) {
250             Log.e(TAG,"prepare - prepareVideo() failed!");
251             return false;
252         }
253 
254         mState = STATE_PREPARED;
255         return true;
256     }
257 
addTrack(int trackIndex, MediaFormat format)258     private boolean addTrack(int trackIndex, MediaFormat format) throws IOException {
259         String mime = format.getString(MediaFormat.KEY_MIME);
260         boolean isVideo = mime.startsWith("video/");
261         boolean isAudio = mime.startsWith("audio/");
262         MediaCodec codec;
263 
264         // setup tunneled video codec if needed
265         if (isVideo && mIsTunneled) {
266             format.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback,
267                         true);
268             MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
269             String codecName = mcl.findDecoderForFormat(format);
270             if (codecName == null) {
271                 Log.e(TAG,"addTrack - Could not find Tunneled playback codec for "+mime+
272                         " format!");
273                 return false;
274             }
275 
276             codec = MediaCodec.createByCodecName(codecName);
277             if (codec == null) {
278                 Log.e(TAG, "addTrack - Could not create Tunneled playback codec "+
279                         codecName+"!");
280                 return false;
281             }
282 
283             if (mAudioTrackState != null) {
284                 format.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, mAudioSessionId);
285             }
286         }
287         else {
288             codec = MediaCodec.createDecoderByType(mime);
289             if (codec == null) {
290                 Log.e(TAG, "addTrack - Could not create regular playback codec for mime "+
291                         mime+"!");
292                 return false;
293             }
294         }
295         if (isVideo && mMediaFormatFrameRate != null) {
296             format.setFloat(MediaFormat.KEY_FRAME_RATE, mMediaFormatFrameRate);
297         }
298         codec.configure(
299                 format,
300                 isVideo ? mSurfaceHolder.getSurface() : null, null, 0);
301 
302         CodecState state;
303         if (isVideo) {
304             state = new CodecState((MediaTimeProvider)this, mVideoExtractor,
305                             trackIndex, format, codec, true, mIsTunneled, mAudioSessionId);
306             mVideoCodecStates.put(Integer.valueOf(trackIndex), state);
307         } else {
308             state = new CodecState((MediaTimeProvider)this, mAudioExtractor,
309                             trackIndex, format, codec, true, mIsTunneled, mAudioSessionId);
310             mAudioCodecStates.put(Integer.valueOf(trackIndex), state);
311         }
312 
313         if (isAudio) {
314             mAudioTrackState = state;
315         }
316 
317         return true;
318     }
319 
getMediaFormatInteger(MediaFormat format, String key)320     protected int getMediaFormatInteger(MediaFormat format, String key) {
321         return format.containsKey(key) ? format.getInteger(key) : 0;
322     }
323 
324     // Calls MediaCodec.start
startCodec()325     public void startCodec() {
326         Log.d(TAG, "start");
327 
328         if (mState != STATE_PREPARED) {
329             throw new IllegalStateException("Expected STATE_PREAPRED, got " + mState);
330         }
331 
332         for (CodecState state : mVideoCodecStates.values()) {
333             state.startCodec();
334         }
335 
336         for (CodecState state : mAudioCodecStates.values()) {
337             state.startCodec();
338         }
339 
340         mDeltaTimeUs = -1;
341         mState = STATE_STARTED;
342     }
343 
344     // Starts the decoding threads and then starts AudioTrack playback
play()345     public void play() {
346         if (mState != STATE_STARTED) {
347             throw new IllegalStateException("Expected STATE_STARTED, got " + mState);
348         }
349         mState = STATE_PLAYING;
350 
351         synchronized (mThreadStartedLock) {
352             mThreadStarted = true;
353             mThread.start();
354         }
355 
356         for (CodecState state : mVideoCodecStates.values()) {
357             state.play();
358         }
359 
360         for (CodecState state : mAudioCodecStates.values()) {
361             state.play();
362         }
363     }
364 
365     // Pauses playback by pausing the AudioTrack
pause()366     public void pause() {
367         Log.d(TAG, "pause");
368 
369         if (mState != STATE_PLAYING) {
370             throw new IllegalStateException("Expected STATE_PLAYING, got " + mState);
371         }
372 
373         synchronized (mStateLock) {
374             for (CodecState state : mVideoCodecStates.values()) {
375                 state.pause();
376             }
377 
378             for (CodecState state : mAudioCodecStates.values()) {
379                 state.pause();
380             }
381 
382             mState = STATE_PAUSED;
383         }
384     }
385 
386     // Resume playback when paused
resume()387     public void resume() {
388         Log.d(TAG, "resume");
389 
390         if (mState != STATE_PAUSED) {
391             throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
392         }
393 
394         synchronized (mStateLock) {
395             for (CodecState state : mVideoCodecStates.values()) {
396                 state.play();
397             }
398 
399             for (CodecState state : mAudioCodecStates.values()) {
400                 state.play();
401             }
402 
403             mState = STATE_PLAYING;
404         }
405     }
406 
flush()407     public void flush() {
408         Log.d(TAG, "flush");
409 
410         if (mState != STATE_PAUSED) {
411             throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
412         }
413 
414         for (CodecState state : mAudioCodecStates.values()) {
415             state.flush();
416         }
417 
418         for (CodecState state : mVideoCodecStates.values()) {
419             state.flush();
420         }
421     }
422 
423     /** Seek all tracks to the first sample time.
424      *
425      * @param  presentationTimeOffsetUs The offset for the presentation time to start at.
426      * @throws IllegalStateException  if the player is not paused
427      */
seekToBeginning(long presentationTimeOffsetUs)428     public void seekToBeginning(long presentationTimeOffsetUs) {
429         Log.d(TAG, "seekToBeginning");
430         if (mState != STATE_PAUSED) {
431             throw new IllegalStateException("Expected STATE_PAUSED, got " + mState);
432         }
433 
434         for (CodecState state : mVideoCodecStates.values()) {
435             state.seekToBeginning(presentationTimeOffsetUs);
436         }
437 
438         for (CodecState state : mAudioCodecStates.values()) {
439             state.seekToBeginning(presentationTimeOffsetUs);
440         }
441     }
442 
443     /**
444      * Enables or disables looping. Should be called after {@link #prepare()}.
445      */
setLoopEnabled(boolean enabled)446     public void setLoopEnabled(boolean enabled) {
447         if (mState != STATE_PREPARED) {
448             throw new IllegalStateException("Expected STATE_PREPARED, got " + mState);
449         }
450 
451         for (CodecState state : mVideoCodecStates.values()) {
452             state.setLoopEnabled(enabled);
453         }
454 
455         for (CodecState state : mAudioCodecStates.values()) {
456             state.setLoopEnabled(enabled);
457         }
458     }
459 
reset()460     public void reset() {
461         if (mState == STATE_PLAYING) {
462             pause();
463         }
464         if (mVideoCodecStates != null) {
465             for (CodecState state : mVideoCodecStates.values()) {
466                 state.release();
467             }
468             mVideoCodecStates = null;
469         }
470 
471         if (mAudioCodecStates != null) {
472             for (CodecState state : mAudioCodecStates.values()) {
473                 state.release();
474             }
475             mAudioCodecStates = null;
476         }
477 
478         if (mAudioExtractor != null) {
479             mAudioExtractor.release();
480             mAudioExtractor = null;
481         }
482 
483         if (mVideoExtractor != null) {
484             mVideoExtractor.release();
485             mVideoExtractor = null;
486         }
487 
488         mDurationUs = -1;
489         mState = STATE_IDLE;
490 
491         synchronized (mThreadStartedLock) {
492             mThreadStarted = false;
493         }
494         try {
495             mThread.join();
496         } catch (InterruptedException ex) {
497             Log.d(TAG, "mThread.join ", ex);
498         }
499     }
500 
isEnded()501     public boolean isEnded() {
502         for (CodecState state : mVideoCodecStates.values()) {
503           if (!state.isEnded()) {
504             return false;
505           }
506         }
507 
508         for (CodecState state : mAudioCodecStates.values()) {
509             if (!state.isEnded()) {
510               return false;
511             }
512         }
513 
514         return true;
515     }
516 
doSomeWork()517     private void doSomeWork() {
518         try {
519             for (CodecState state : mVideoCodecStates.values()) {
520                 state.doSomeWork();
521             }
522         } catch (IllegalStateException e) {
523             throw new Error("Video CodecState.doSomeWork", e);
524         }
525 
526         try {
527             for (CodecState state : mAudioCodecStates.values()) {
528                 state.doSomeWork();
529             }
530         } catch (IllegalStateException e) {
531             throw new Error("Audio CodecState.doSomeWork", e);
532         }
533 
534     }
535 
getNowUs()536     public long getNowUs() {
537         if (mAudioTrackState == null) {
538             return System.currentTimeMillis() * 1000;
539         }
540 
541         return mAudioTrackState.getAudioTimeUs();
542     }
543 
getRealTimeUsForMediaTime(long mediaTimeUs)544     public long getRealTimeUsForMediaTime(long mediaTimeUs) {
545         if (mDeltaTimeUs == -1) {
546             long nowUs = getNowUs();
547             mDeltaTimeUs = nowUs - mediaTimeUs;
548         }
549 
550         return mDeltaTimeUs + mediaTimeUs;
551     }
552 
getDuration()553     public int getDuration() {
554         return (int)((mDurationUs + 500) / 1000);
555     }
556 
557     /**
558      * Retrieve the presentation timestamp of the latest queued output sample.
559      * In tunnel mode, retrieves the presentation timestamp of the latest rendered video frame.
560      * @return presentation timestamp in microseconds, or {@code CodecState.UNINITIALIZED_TIMESTAMP}
561      * if playback has not started.
562     */
getCurrentPosition()563     public int getCurrentPosition() {
564         if (mVideoCodecStates == null) {
565             return CodecState.UNINITIALIZED_TIMESTAMP;
566         }
567 
568         long positionUs = CodecState.UNINITIALIZED_TIMESTAMP;
569 
570         for (CodecState state : mVideoCodecStates.values()) {
571             long trackPositionUs = state.getCurrentPositionUs();
572             if (trackPositionUs > positionUs) {
573                 positionUs = trackPositionUs;
574             }
575         }
576 
577         if (positionUs == CodecState.UNINITIALIZED_TIMESTAMP) {
578             return CodecState.UNINITIALIZED_TIMESTAMP;
579         }
580         return (int) (positionUs + 500) / 1000;
581     }
582 
583     /**
584      * Returns the system time of the latest rendered frame in any of the video codecs.
585      */
getCurrentRenderedSystemTimeNano()586     public long getCurrentRenderedSystemTimeNano() {
587         if (mVideoCodecStates == null) {
588             return 0;
589         }
590 
591         long position = 0;
592 
593         for (CodecState state : mVideoCodecStates.values()) {
594             long trackPosition = state.getRenderedVideoSystemTimeNano();
595 
596             if (trackPosition > position) {
597                 position = trackPosition;
598             }
599         }
600         return position;
601     }
602 
603     /**
604      * Returns the timestamp of the last written audio sample, in microseconds.
605      */
getAudioTrackPositionUs()606     public long getAudioTrackPositionUs() {
607         if (mAudioTrackState == null) {
608             return 0;
609         }
610         return mAudioTrackState.getCurrentPositionUs();
611     }
612 
613     /**
614      * Returns the presentation timestamp of the last rendered video frame.
615      *
616      * Note: This assumes there is exactly one video codec running in the player.
617      */
getVideoTimeUs()618     public long getVideoTimeUs() {
619         if (mVideoCodecStates == null || mVideoCodecStates.get(0) == null) {
620             return CodecState.UNINITIALIZED_TIMESTAMP;
621         }
622         return mVideoCodecStates.get(0).getVideoTimeUs();
623     }
624 
getVideoSystemTimeNs()625     public long getVideoSystemTimeNs() {
626         if (mVideoCodecStates == null || mVideoCodecStates.get(0) == null) {
627             return -1;
628         }
629         return mVideoCodecStates.get(0).getVideoTimeUs();
630 
631     }
632 
633     /**
634      * Returns the ordered list of video frame timestamps rendered in tunnel mode.
635      *
636      * Note: This assumes there is exactly one video codec running in the player.
637      */
getRenderedVideoFrameTimestampList()638     public ImmutableList<Long> getRenderedVideoFrameTimestampList() {
639         return mVideoCodecStates.get(0).getRenderedVideoFrameTimestampList();
640     }
641 
642     /**
643      * Returns the ordered list of system times of rendered video frames in tunnel-mode.
644      *
645      * Note: This assumes there is at most one tunneled mode video codec running in the player.
646      */
getRenderedVideoFrameSystemTimeList()647     public ImmutableList<Long> getRenderedVideoFrameSystemTimeList() {
648         if (mVideoCodecStates == null) {
649             return ImmutableList.<Long>of();
650         }
651 
652         for (CodecState state : mVideoCodecStates.values()) {
653             ImmutableList<Long> timestamps = state.getRenderedVideoFrameSystemTimeList();
654             if (!timestamps.isEmpty())
655                 return timestamps;
656         }
657         return ImmutableList.<Long>of();
658     }
659 
660     /**
661      * When the player is on stand-by, tries to queue one frame worth of video per video codec.
662      *
663      * Returns arbitrarily the timestamp of any frame queued this way by one of the video codecs.
664      * Returns null if no video frame were queued.
665      */
queueOneVideoFrame()666     public Long queueOneVideoFrame() {
667         Log.d(TAG, "queueOneVideoFrame");
668 
669         if (mState != STATE_STARTED && mState != STATE_PAUSED) {
670             throw new IllegalStateException("Expected STARTED or PAUSED, got " + mState);
671         }
672 
673         Long result = null;
674         if (mVideoCodecStates != null) {
675             for (CodecState state : mVideoCodecStates.values()) {
676                 Long timestamp = state.doSomeWork(true /* mustWait */);
677                 if (timestamp != null) {
678                     result = timestamp;
679                 }
680             }
681         }
682         return result;
683     }
684 
685     /**
686      * Configure video peek for the video codecs attached to the player.
687      */
setVideoPeek(boolean enable)688     public void setVideoPeek(boolean enable) {
689         Log.d(TAG, "setVideoPeek");
690         if (mVideoCodecStates == null) {
691             return;
692         }
693 
694         for (CodecState state: mVideoCodecStates.values()) {
695             state.setVideoPeek(enable);
696         }
697     }
698 
getTimestamp()699     public AudioTimestamp getTimestamp() {
700         if (mAudioCodecStates == null) {
701             return null;
702         }
703 
704         AudioTimestamp timestamp = new AudioTimestamp();
705         if (mAudioCodecStates.size() != 0) {
706             timestamp =
707                     mAudioCodecStates.entrySet().iterator().next().getValue().getTimestamp();
708         }
709         return timestamp;
710     }
711 
712     /** Queries the attached video codecs for video peek ready signals.
713      *
714      * Returns true if any of the video codecs have video peek ready.
715      * Returns false otherwise.
716      */
isFirstTunnelFrameReady()717     public boolean isFirstTunnelFrameReady() {
718         Log.d(TAG, "firstTunnelFrameReady");
719         if (mVideoCodecStates == null) {
720             return false;
721         }
722 
723         for (CodecState state : mVideoCodecStates.values()) {
724             if (state.isFirstTunnelFrameReady()) {
725                 return true;
726             }
727         }
728         return false;
729     }
730 
731     /** Returns the number of frames that have been sent down to the HAL. */
getAudioFramesWritten()732     public int getAudioFramesWritten() {
733         if (mAudioCodecStates == null) {
734             return -1;
735         }
736         return mAudioCodecStates.entrySet().iterator().next().getValue().getFramesWritten();
737     }
738 
739     /** Configure underrun simulation on audio codecs. */
stopDrainingAudioOutputBuffers(boolean stop)740     public void stopDrainingAudioOutputBuffers(boolean stop) {
741         for (CodecState state: mAudioCodecStates.values()) {
742             state.stopDrainingOutputBuffers(stop);
743         }
744     }
745 
746     /** Configure an offset (in Ns) to audio content to simulate track desynchronization. */
setAudioTrackOffsetNs(long audioOffsetNs)747     public void setAudioTrackOffsetNs(long audioOffsetNs) {
748         if (mAudioTrackState != null) {
749             mAudioTrackState.setAudioOffsetNs(audioOffsetNs);
750         }
751     }
752 
753     /** Returns the underlying {@code AudioTrack}, if any. */
getAudioTrack()754     public AudioTrack getAudioTrack() {
755         if (mAudioTrackState != null) {
756             return mAudioTrackState.getAudioTrack();
757         }
758         return null;
759     }
760 }
761