/*
 * Copyright (C) 2014 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package android.media.cts;

import android.media.AudioTimestamp;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.view.Surface;

import androidx.test.filters.SdkSuppress;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.MediaUtils;

import com.google.common.collect.ImmutableList;

import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.LinkedList;

/**
 * Class for directly managing both audio and video playback by
 * using {@link MediaCodec} and {@link AudioTrack}.
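 *
 * <p>A minimal sketch of a typical driving loop. The {@code MediaTimeProvider} implementation
 * and the pre-configured {@code MediaExtractor}/{@code MediaCodec} instances are assumed to be
 * supplied by the caller; they are not created by this class:
 * <pre>{@code
 * CodecState state = new CodecState(timeProvider, extractor, trackIndex, format, codec,
 *         false, false, audioSessionId); // limitQueueDepth=false, tunneled=false
 * state.startCodec();
 * state.play();
 * while (!state.isEnded()) {
 *     state.doSomeWork();        // feed input, drain output
 *     state.processAudioTrack(); // pump queued audio into the AudioTrack
 * }
 * state.release();
 * }</pre>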
 */
public class CodecState {
    private static final String TAG = CodecState.class.getSimpleName();

    public static final int UNINITIALIZED_TIMESTAMP = Integer.MIN_VALUE;

    private boolean mSawInputEOS;
    private volatile boolean mSawOutputEOS;
    private boolean mLimitQueueDepth;
    private boolean mIsTunneled;
    private boolean mIsAudio;
    private int mAudioSessionId;
    private ByteBuffer[] mCodecInputBuffers;
    private ByteBuffer[] mCodecOutputBuffers;
    private int mTrackIndex;
    private int mAvailableInputBufferIndex;
    private LinkedList<Integer> mAvailableOutputBufferIndices;
    private LinkedList<MediaCodec.BufferInfo> mAvailableOutputBufferInfos;

    /**
     * The media timestamp of the latest frame decoded by this codec.
     *
     * Note: in tunnel mode, this coincides with the latest rendered frame.
     */
    private volatile long mDecodedFramePresentationTimeUs;
    private volatile long mRenderedVideoFramePresentationTimeUs;
    private volatile long mRenderedVideoFrameSystemTimeNano;
    private long mFirstSampleTimeUs;
    private long mPlaybackStartTimeUs;
    private long mLastPresentTimeUs;
    private MediaCodec mCodec;
    private MediaTimeProvider mMediaTimeProvider;
    private MediaExtractor mExtractor;
    private MediaFormat mFormat;
    private MediaFormat mOutputFormat;
    private NonBlockingAudioTrack mAudioTrack;
    private volatile OnFrameRenderedListener mOnFrameRenderedListener;
    /** A list of reported rendered video frames' timestamps. */
    private ArrayList<Long> mRenderedVideoFrameTimestampList;
    private ArrayList<Long> mRenderedVideoFrameSystemTimeList;
    private boolean mIsFirstTunnelFrameReady;
    private volatile OnFirstTunnelFrameReadyListener mOnFirstTunnelFrameReadyListener;
    /** If true, starves the underlying {@link MediaCodec} to simulate an underrun. */
    private boolean mShouldStopDrainingOutputBuffers;

    private static boolean mIsAtLeastS = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.S);

    /** If true, the video/audio will start from the beginning when it reaches the end. */
    private boolean mLoopEnabled = false;

    /**
     * Manages audio and video playback using {@link MediaCodec} and {@link AudioTrack}.
     */
    public CodecState(
            MediaTimeProvider mediaTimeProvider,
            MediaExtractor extractor,
            int trackIndex,
            MediaFormat format,
            MediaCodec codec,
            boolean limitQueueDepth,
            boolean tunneled,
            int audioSessionId) {
        mMediaTimeProvider = mediaTimeProvider;
        mExtractor = extractor;
        mTrackIndex = trackIndex;
        mFormat = format;
        mSawInputEOS = mSawOutputEOS = false;
        mLimitQueueDepth = limitQueueDepth;
        mIsTunneled = tunneled;
        mAudioSessionId = audioSessionId;
        mFirstSampleTimeUs = -1;
        mPlaybackStartTimeUs = 0;
        mLastPresentTimeUs = 0;

        mCodec = codec;

        mAvailableInputBufferIndex = -1;
        mAvailableOutputBufferIndices = new LinkedList<Integer>();
        mAvailableOutputBufferInfos = new LinkedList<MediaCodec.BufferInfo>();
        mRenderedVideoFrameTimestampList = new ArrayList<Long>();
        mRenderedVideoFrameSystemTimeList = new ArrayList<Long>();

        mDecodedFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameSystemTimeNano = UNINITIALIZED_TIMESTAMP;

        mIsFirstTunnelFrameReady = false;
        mShouldStopDrainingOutputBuffers = false;

        String mime = mFormat.getString(MediaFormat.KEY_MIME);
        Log.d(TAG, "CodecState::CodecState " + mime);
        mIsAudio = mime.startsWith("audio/");

        setFrameListeners(mCodec);
    }

    public void release() {
        mCodec.stop();
        mCodecInputBuffers = null;
        mCodecOutputBuffers = null;
        mOutputFormat = null;

        mAvailableOutputBufferIndices.clear();
        mAvailableOutputBufferInfos.clear();

        mAvailableInputBufferIndex = -1;
        mAvailableOutputBufferIndices = null;
        mAvailableOutputBufferInfos = null;

        releaseFrameListeners();

        mCodec.release();
        mCodec = null;

        if (mAudioTrack != null) {
            mAudioTrack.release();
            mAudioTrack = null;
        }
    }

    public void startCodec() {
        mCodec.start();
        mCodecInputBuffers = mCodec.getInputBuffers();
        if (!mIsTunneled || mIsAudio) {
            mCodecOutputBuffers = mCodec.getOutputBuffers();
        }
    }

    public void play() {
        if (mAudioTrack != null) {
            mAudioTrack.play();
        }
    }

    public void pause() {
        if (mAudioTrack != null) {
            mAudioTrack.pause();
        }
    }

    /**
     * Returns the media timestamp of the latest decoded sample/frame.
     *
     * TODO(b/202710709): Disambiguate getCurrentPosition's meaning
     */
    public long getCurrentPositionUs() {
        // Use decoded frame time when available, otherwise default to render time (typically, in
        // tunnel mode).
        if (mDecodedFramePresentationTimeUs != UNINITIALIZED_TIMESTAMP) {
            return mDecodedFramePresentationTimeUs;
        } else {
            return mRenderedVideoFramePresentationTimeUs;
        }
    }

    /** Returns the system time of the latest rendered video frame. */
    public long getRenderedVideoSystemTimeNano() {
        return mRenderedVideoFrameSystemTimeNano;
    }

    public void flush() {
        if (!mIsTunneled || mIsAudio) {
            mAvailableOutputBufferIndices.clear();
            mAvailableOutputBufferInfos.clear();
        }

        mAvailableInputBufferIndex = -1;
        mSawInputEOS = false;
        mSawOutputEOS = false;

        if (mAudioTrack != null
                && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
            mAudioTrack.flush();
        }

        mCodec.flush();
        mDecodedFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFramePresentationTimeUs = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameSystemTimeNano = UNINITIALIZED_TIMESTAMP;
        mRenderedVideoFrameTimestampList = new ArrayList<Long>();
        mRenderedVideoFrameSystemTimeList = new ArrayList<Long>();
        mIsFirstTunnelFrameReady = false;
    }

    public boolean isEnded() {
        return mSawInputEOS && mSawOutputEOS;
    }

    /** @see #doSomeWork(boolean) */
    public Long doSomeWork() {
        return doSomeWork(false /* mustWait */);
    }

    /**
     * {@code doSomeWork} is the worker method that does all buffer handling and decoding work.
     * It first reads sample data from the {@link MediaExtractor} and pushes it into the
     * {@link MediaCodec}; it then dequeues output buffers from the {@link MediaCodec}, queues
     * them internally, and drains them (writing audio or rendering video as appropriate).
     *
     * @param mustWait Whether to block until an input buffer is available
     *
     * @return the timestamp of the queued input sample, if any.
     */
    public Long doSomeWork(boolean mustWait) {
        // Extract input data, if relevant
        Long sampleTime = null;
        if (mAvailableInputBufferIndex == -1) {
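            // A negative timeout makes dequeueInputBuffer block until an input buffer is
            // available, while a timeout of 0 returns INFO_TRY_AGAIN_LATER immediately.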
            int indexInput = mCodec.dequeueInputBuffer(mustWait ? -1 : 0 /* timeoutUs */);
            if (indexInput != MediaCodec.INFO_TRY_AGAIN_LATER) {
                mAvailableInputBufferIndex = indexInput;
            }
        }
        if (mAvailableInputBufferIndex != -1) {
            sampleTime = feedInputBuffer(mAvailableInputBufferIndex);
            if (sampleTime != null) {
                mAvailableInputBufferIndex = -1;
            }
        }

        // Queue output data, if relevant
        if (mIsAudio || !mIsTunneled) {
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int indexOutput = mCodec.dequeueOutputBuffer(info, 0 /* timeoutUs */);

            if (indexOutput == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                mOutputFormat = mCodec.getOutputFormat();
                onOutputFormatChanged();
            } else if (indexOutput == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                mCodecOutputBuffers = mCodec.getOutputBuffers();
            } else if (indexOutput != MediaCodec.INFO_TRY_AGAIN_LATER) {
                mAvailableOutputBufferIndices.add(indexOutput);
                mAvailableOutputBufferInfos.add(info);
            }

            while (drainOutputBuffer()) {
            }
        }

        return sampleTime;
    }

    public void setLoopEnabled(boolean enabled) {
        mLoopEnabled = enabled;
    }

    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private void setFrameListeners(MediaCodec codec) {
        if (!mIsAudio) {
            // Setup frame rendered callback for video codecs
            mOnFrameRenderedListener = new OnFrameRenderedListener();
            codec.setOnFrameRenderedListener(mOnFrameRenderedListener,
                    new Handler(Looper.getMainLooper()));

            if (mIsTunneled) {
                mOnFirstTunnelFrameReadyListener = new OnFirstTunnelFrameReadyListener();
                codec.setOnFirstTunnelFrameReadyListener(new Handler(Looper.getMainLooper()),
                        mOnFirstTunnelFrameReadyListener);
            }
        }
    }

    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private void releaseFrameListeners() {
        if (mOnFrameRenderedListener != null) {
            mCodec.setOnFrameRenderedListener(null, null);
            mOnFrameRenderedListener = null;
        }
        if (mOnFirstTunnelFrameReadyListener != null) {
            mCodec.setOnFirstTunnelFrameReadyListener(null, null);
            mOnFirstTunnelFrameReadyListener = null;
        }
    }

    /**
     * Extracts some data from the configured {@link MediaExtractor} and feeds it to the
     * configured {@link MediaCodec}.
     *
     * Returns the timestamp of the queued buffer, if any.
     * Returns null if no buffer was queued (e.g. on input EOS or when the audio queue is full).
     */
    private Long feedInputBuffer(int inputBufferIndex)
            throws MediaCodec.CryptoException, IllegalStateException {
        if (mSawInputEOS || inputBufferIndex == -1) {
            return null;
        }

        // Stall reads if more than 2 MiB of audio data is queued, so that we do not occupy
        // too much heap.
        if (mLimitQueueDepth && mAudioTrack != null &&
                mAudioTrack.getNumBytesQueued() > 2 * 1024 * 1024) {
            return null;
        }

        ByteBuffer codecData = mCodecInputBuffers[inputBufferIndex];

        int trackIndex = mExtractor.getSampleTrackIndex();

        if (trackIndex == mTrackIndex) {
            int sampleSize =
                mExtractor.readSampleData(codecData, 0 /* offset */);

            long sampleTime = mExtractor.getSampleTime();

            int sampleFlags = mExtractor.getSampleFlags();

            if (sampleSize <= 0) {
                Log.d(TAG, "sampleSize: " + sampleSize + " trackIndex:" + trackIndex +
                        " sampleTime:" + sampleTime + " sampleFlags:" + sampleFlags);
                mSawInputEOS = true;
                return null;
            }

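            // In tunnel mode, rebase timestamps so that the first queued sample starts at
            // zero; subsequent samples are offset relative to it.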
            if (mIsTunneled) {
                if (mFirstSampleTimeUs == -1) {
                    mFirstSampleTimeUs = sampleTime;
                }
                sampleTime -= mFirstSampleTimeUs;
            }

            mLastPresentTimeUs = mPlaybackStartTimeUs + sampleTime;

            if ((sampleFlags & MediaExtractor.SAMPLE_FLAG_ENCRYPTED) != 0) {
                MediaCodec.CryptoInfo info = new MediaCodec.CryptoInfo();
                mExtractor.getSampleCryptoInfo(info);

                mCodec.queueSecureInputBuffer(
                        inputBufferIndex, 0 /* offset */, info, mLastPresentTimeUs, 0 /* flags */);
            } else {
                mCodec.queueInputBuffer(
                        inputBufferIndex, 0 /* offset */, sampleSize, mLastPresentTimeUs, 0 /* flags */);
            }

            mExtractor.advance();
            return mLastPresentTimeUs;
        } else if (trackIndex < 0) {
            Log.d(TAG, "saw input EOS on track " + mTrackIndex);

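            // When looping, rewind the extractor and shift the playback start time forward so
            // that queued presentation timestamps keep increasing monotonically.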
            if (mLoopEnabled) {
                Log.d(TAG, "looping from the beginning");
                mExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
                mPlaybackStartTimeUs = mLastPresentTimeUs;
                return null;
            }

            mSawInputEOS = true;
            mCodec.queueInputBuffer(
                    inputBufferIndex, 0 /* offset */, 0 /* sampleSize */,
                    0 /* sampleTime */, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }

        return null;
    }

    private void onOutputFormatChanged() {
        String mime = mOutputFormat.getString(MediaFormat.KEY_MIME);
        // b/9250789
        Log.d(TAG, "CodecState::onOutputFormatChanged " + mime);

        mIsAudio = false;
        if (mime.startsWith("audio/")) {
            mIsAudio = true;
            int sampleRate =
                mOutputFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);

            int channelCount =
                mOutputFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

            Log.d(TAG, "CodecState::onOutputFormatChanged Audio" +
                    " sampleRate:" + sampleRate + " channels:" + channelCount);
            // Sanity-check the reported format before passing it down to AudioTrack. If
            // MediaExtractor works properly this check is unnecessary; however, in our tests
            // we found a few cases where a channel count of 0 and a sample rate of 0 were
            // returned.
            if (channelCount < 1 || channelCount > 8 ||
                    sampleRate < 8000 || sampleRate > 128000) {
                return;
            }
            mAudioTrack = new NonBlockingAudioTrack(sampleRate, channelCount,
                                    mIsTunneled, mAudioSessionId);
            mAudioTrack.play();
        }

        if (mime.startsWith("video/")) {
            int width = mOutputFormat.getInteger(MediaFormat.KEY_WIDTH);
            int height = mOutputFormat.getInteger(MediaFormat.KEY_HEIGHT);
            Log.d(TAG, "CodecState::onOutputFormatChanged Video" +
                    " width:" + width + " height:" + height);
        }
    }

    /** Returns true if more output data could be drained. */
    private boolean drainOutputBuffer() {
        if (mSawOutputEOS || mAvailableOutputBufferIndices.isEmpty()
                || mShouldStopDrainingOutputBuffers) {
            return false;
        }

        int index = mAvailableOutputBufferIndices.peekFirst().intValue();
        MediaCodec.BufferInfo info = mAvailableOutputBufferInfos.peekFirst();

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            Log.d(TAG, "saw output EOS on track " + mTrackIndex);

            mSawOutputEOS = true;

            // Do not stop the audio track here. Video presentation may not have finished
            // yet, and stopping the audio track now would result in getAudioTimeUs
            // returning 0 and prevent video samples from being presented.
            // We stop the audio track before the playback thread exits.
            if (mAudioTrack != null) {
                mAudioTrack.setEndOfStream();
            }
            return false;
        }

        if (mAudioTrack != null) {
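            // Audio path: copy the decoded PCM out of the codec buffer so that the buffer can
            // be returned to the codec immediately, before the non-blocking AudioTrack write
            // completes. The presentation time is converted from us to ns for the write.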
            ByteBuffer buffer = mCodecOutputBuffers[index];
            byte[] audioArray = new byte[info.size];
            buffer.get(audioArray);
            buffer.clear();

            mAudioTrack.write(ByteBuffer.wrap(audioArray), info.size,
                    info.presentationTimeUs * 1000);

            mCodec.releaseOutputBuffer(index, false /* render */);

            mDecodedFramePresentationTimeUs = info.presentationTimeUs;

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        } else {
            // video
            boolean render;
            long realTimeUs =
                    mMediaTimeProvider.getRealTimeUsForMediaTime(info.presentationTimeUs);

            long nowUs = mMediaTimeProvider.getNowUs();

            long lateUs = nowUs - realTimeUs;

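            // Render window: frames more than 45 ms early are left queued for a later pass,
            // frames more than 30 ms late are dropped without rendering, and everything in
            // between is rendered.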
            if (lateUs < -45000) {
                // too early
                return false;
            } else if (lateUs > 30000) {
                Log.d(TAG, "video late by " + lateUs + " us.");
                render = false;
            } else {
                render = true;
                mDecodedFramePresentationTimeUs = info.presentationTimeUs;
            }

            mCodec.releaseOutputBuffer(index, render);

            mAvailableOutputBufferIndices.removeFirst();
            mAvailableOutputBufferInfos.removeFirst();
            return true;
        }
    }

    /**
     * Callback called by {@link MediaCodec} when it is notified that a decoded video frame has
     * been rendered on the attached {@link Surface}.
     */
    private class OnFrameRenderedListener implements MediaCodec.OnFrameRenderedListener {
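        // In tunneled playback, the end of stream is signaled by a frame-rendered callback
        // carrying this sentinel presentation time; see its use in onFrameRendered below.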
        private static final long TUNNELING_EOS_PRESENTATION_TIME_US = Long.MAX_VALUE;

        @Override
        public void onFrameRendered(MediaCodec codec, long presentationTimeUs, long nanoTime) {
            if (this != mOnFrameRenderedListener) {
                return; // stale event
            }
            if (presentationTimeUs == TUNNELING_EOS_PRESENTATION_TIME_US) {
                mSawOutputEOS = true;
            } else {
                mRenderedVideoFramePresentationTimeUs = presentationTimeUs;
            }
            mRenderedVideoFrameSystemTimeNano = nanoTime;
            mRenderedVideoFrameTimestampList.add(presentationTimeUs);
            mRenderedVideoFrameSystemTimeList.add(mRenderedVideoFrameSystemTimeNano);
        }
    }

    /**
     * Returns the audio playback time of the attached {@link AudioTrack}, in microseconds, or 0
     * if there is no audio track.
     */
    public long getAudioTimeUs() {
        if (mAudioTrack == null) {
            return 0;
        }

        return mAudioTrack.getAudioTimeUs();
    }

    /** Returns the presentation timestamp of the last rendered video frame. */
    public long getVideoTimeUs() {
        return mRenderedVideoFramePresentationTimeUs;
    }

    /** Callback called in tunnel mode when video peek is ready. */
    @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
    private class OnFirstTunnelFrameReadyListener
        implements MediaCodec.OnFirstTunnelFrameReadyListener {

        @Override
        public void onFirstTunnelFrameReady(MediaCodec codec) {
            if (this != mOnFirstTunnelFrameReadyListener) {
                return; // stale event
            }
            mIsFirstTunnelFrameReady = true;
        }
    }

    /**
     * If a video codec, returns the list of rendered frames' timestamps. Otherwise, returns an
     * empty list.
     */
    public ImmutableList<Long> getRenderedVideoFrameTimestampList() {
        return ImmutableList.<Long>copyOf(mRenderedVideoFrameTimestampList);
    }

    /**
     * If a video codec, returns the list of system times at which frames were rendered.
     * Otherwise, returns an empty list.
     */
    public ImmutableList<Long> getRenderedVideoFrameSystemTimeList() {
        return ImmutableList.<Long>copyOf(mRenderedVideoFrameSystemTimeList);
    }

    /** Process the attached {@link AudioTrack}, if any. */
    public void processAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.process();
        }
    }

    /** Returns the number of audio frames written to the attached {@link AudioTrack}, or 0. */
    public int getFramesWritten() {
        if (mAudioTrack != null) {
            return mAudioTrack.getFramesWritten();
        }
        return 0;
    }

    /** Returns the {@link AudioTimestamp} of the attached {@link AudioTrack}, if any. */
    public AudioTimestamp getTimestamp() {
        if (mAudioTrack == null) {
            return null;
        }

        return mAudioTrack.getTimestamp();
    }

    /** Stop the attached {@link AudioTrack}, if any. */
    public void stopAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.stop();
        }
    }

    /** Start associated audio track, if any. */
    public void playAudioTrack() {
        if (mAudioTrack != null) {
            mAudioTrack.play();
        }
    }

    /** Sets the output surface on the underlying video codec; not supported for audio. */
    public void setOutputSurface(Surface surface) {
        if (mAudioTrack != null) {
            throw new UnsupportedOperationException("Cannot set surface on audio codec");
        }
        mCodec.setOutputSurface(surface);
    }

    /** Configure video peek. */
    public void setVideoPeek(boolean enable) {
        if (MediaUtils.check(mIsAtLeastS, "setVideoPeek requires Android S")) {
            Bundle parameters = new Bundle();
            parameters.putInt(MediaCodec.PARAMETER_KEY_TUNNEL_PEEK, enable ? 1 : 0);
            mCodec.setParameters(parameters);
        }
    }

    /** In tunnel mode, queries whether the first video frame is ready for video peek. */
    public boolean isFirstTunnelFrameReady() {
        return mIsFirstTunnelFrameReady;
    }

    /**
     * Stops (or resumes) draining of output buffers, which can be used to simulate an underrun
     * condition.
     */
    public void stopDrainingOutputBuffers(boolean stop) {
        mShouldStopDrainingOutputBuffers = stop;
        if (mAudioTrack != null) {
            mAudioTrack.setStopWriting(stop);
        }
    }

    /**
     * Option to introduce an offset (positive or negative, in nanoseconds) to the content
     * queued to the {@link AudioTrack}.
     */
    public void setAudioOffsetNs(long audioOffsetNs) {
        if (mAudioTrack != null) {
            mAudioTrack.setAudioOffsetNs(audioOffsetNs);
        }
    }

    /** Returns the underlying {@code AudioTrack}, if any. */
    public AudioTrack getAudioTrack() {
        if (mAudioTrack != null) {
            return mAudioTrack.getAudioTrack();
        }
        return null;
    }

    /**
     * Seek media extractor to the beginning of the configured track.
     *
     * @param presentationTimeOffsetUs The offset for the presentation time to start at.
     */
    public void seekToBeginning(long presentationTimeOffsetUs) {
        mExtractor.seekTo(mFirstSampleTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        mPlaybackStartTimeUs = presentationTimeOffsetUs;
    }
}