xref: /aosp_15_r20/cts/tests/tests/media/decoder/src/android/media/decoder/cts/DecoderTest.java (revision b7c941bb3fa97aba169d73cee0bed2de8ac964bf)
1 /*
2  * Copyright (C) 2012 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.media.decoder.cts;
18 
19 import static android.media.MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback;
20 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel31;
21 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel32;
22 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel4;
23 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCLevel42;
24 import static android.media.MediaCodecInfo.CodecProfileLevel.AVCProfileHigh;
25 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel31;
26 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCMainTierLevel41;
27 import static android.media.MediaCodecInfo.CodecProfileLevel.HEVCProfileMain;
28 
29 import static org.junit.Assert.assertEquals;
30 import static org.junit.Assert.assertFalse;
31 import static org.junit.Assert.assertNotEquals;
32 import static org.junit.Assert.assertNotNull;
33 import static org.junit.Assert.assertTrue;
34 import static org.junit.Assert.fail;
35 
36 import android.app.ActivityManager;
37 import android.content.Context;
38 import android.content.pm.PackageManager;
39 import android.content.res.AssetFileDescriptor;
40 import android.graphics.ImageFormat;
41 import android.media.AudioFormat;
42 import android.media.AudioManager;
43 import android.media.AudioTimestamp;
44 import android.media.Image;
45 import android.media.MediaCodec;
46 import android.media.MediaCodecInfo;
47 import android.media.MediaCodecInfo.CodecCapabilities;
48 import android.media.MediaCodecList;
49 import android.media.MediaExtractor;
50 import android.media.MediaFormat;
51 import android.media.cts.CodecState;
52 import android.media.cts.MediaCodecTunneledPlayer;
53 import android.media.cts.MediaHeavyPresubmitTest;
54 import android.media.cts.MediaTestBase;
55 import android.media.cts.TestUtils;
56 import android.net.Uri;
57 import android.os.Build;
58 import android.os.ParcelFileDescriptor;
59 import android.os.SystemProperties;
60 import android.platform.test.annotations.AppModeFull;
61 import android.util.Log;
62 import android.view.Surface;
63 
64 import androidx.test.ext.junit.runners.AndroidJUnit4;
65 import androidx.test.filters.SdkSuppress;
66 
67 import com.android.compatibility.common.util.ApiLevelUtil;
68 import com.android.compatibility.common.util.ApiTest;
69 import com.android.compatibility.common.util.CddTest;
70 import com.android.compatibility.common.util.DeviceReportLog;
71 import com.android.compatibility.common.util.MediaUtils;
72 import com.android.compatibility.common.util.Preconditions;
73 import com.android.compatibility.common.util.ResultType;
74 import com.android.compatibility.common.util.ResultUnit;
75 
76 import com.google.common.collect.ImmutableList;
77 
78 import org.junit.After;
79 import org.junit.Assume;
80 import org.junit.Before;
81 import org.junit.Test;
82 import org.junit.runner.RunWith;
83 
84 import java.io.BufferedInputStream;
85 import java.io.File;
86 import java.io.FileNotFoundException;
87 import java.io.IOException;
88 import java.io.InputStream;
89 import java.nio.ByteBuffer;
90 import java.time.Duration;
91 import java.util.ArrayList;
92 import java.util.Arrays;
93 import java.util.HashMap;
94 import java.util.List;
95 import java.util.Map;
96 import java.util.concurrent.TimeUnit;
97 import java.util.function.Supplier;
98 import java.util.zip.CRC32;
99 
@MediaHeavyPresubmitTest
@AppModeFull(reason = "There should be no instant apps specific behavior related to decoders")
@RunWith(AndroidJUnit4.class)
public class DecoderTest extends MediaTestBase {
    private static final String TAG = "DecoderTest";
    private static final String REPORT_LOG_NAME = "CtsMediaDecoderTestCases";

    // True when the vendor partition was built against SDK level S or newer
    // (a missing property defaults to CUR_DEVELOPMENT, i.e. "at least S").
    public static final boolean IS_VENDOR_AT_LEAST_S =
            SystemProperties.getInt("ro.vendor.api_level",
                                    Build.VERSION_CODES.CUR_DEVELOPMENT)
                    >= Build.VERSION_CODES.S;

    // Platform API-level gates used to skip tests on unsupported releases.
    private static boolean IS_AT_LEAST_R = ApiLevelUtil.isAtLeast(Build.VERSION_CODES.R);
    private static boolean IS_BEFORE_S = ApiLevelUtil.isBefore(Build.VERSION_CODES.S);
    private static boolean IS_AFTER_T = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU);

    // Codec reset strategies exercised by the decode helpers.
    private static final int RESET_MODE_NONE = 0;
    private static final int RESET_MODE_RECONFIGURE = 1;
    private static final int RESET_MODE_FLUSH = 2;
    private static final int RESET_MODE_EOS_FLUSH = 3;

    // MediaFormat keys holding codec-specific data (CSD) buffers.
    private static final String[] CSD_KEYS = new String[] { "csd-0", "csd-1" };

    private static final int CONFIG_MODE_NONE = 0;
    private static final int CONFIG_MODE_QUEUE = 1;

    // How broadly a given capability must be supported across codecs.
    public static final int CODEC_ALL = 0; // All codecs must support
    public static final int CODEC_ANY = 1; // At least one codec must support
    public static final int CODEC_DEFAULT = 2; // Default codec must support
    public static final int CODEC_OPTIONAL = 3; // Codec support is optional

    // Reference PCM samples (16-bit), loaded from sinesweepraw.raw in setUp().
    short[] mMasterBuffer;
    // Directory containing all media test assets.
    static final String mInpPrefix = WorkDir.getMediaDirString();

    private MediaCodecTunneledPlayer mMediaCodecPlayer;
    private static final int SLEEP_TIME_MS = 1000;
    private static final long PLAY_TIME_MS = TimeUnit.MILLISECONDS.convert(1, TimeUnit.MINUTES);

    // Cache of mime type -> name of the platform-default decoder; populated
    // lazily by isDefaultCodec().
    static final Map<String, String> sDefaultDecoders = new HashMap<>();
getAssetFileDescriptorFor(final String res)140     protected static AssetFileDescriptor getAssetFileDescriptorFor(final String res)
141             throws FileNotFoundException {
142         File inpFile = new File(mInpPrefix + res);
143         Preconditions.assertTestFileExists(mInpPrefix + res);
144         ParcelFileDescriptor parcelFD =
145                 ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY);
146         return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize());
147     }
148 
149     @Before
150     @Override
setUp()151     public void setUp() throws Throwable {
152         super.setUp();
153 
154         // read primary file into memory
155         AssetFileDescriptor masterFd = getAssetFileDescriptorFor("sinesweepraw.raw");
156         long masterLength = masterFd.getLength();
157         mMasterBuffer = new short[(int) (masterLength / 2)];
158         InputStream is = masterFd.createInputStream();
159         BufferedInputStream bis = new BufferedInputStream(is);
160         for (int i = 0; i < mMasterBuffer.length; i++) {
161             int lo = bis.read();
162             int hi = bis.read();
163             if (hi >= 128) {
164                 hi -= 256;
165             }
166             int sample = hi * 256 + lo;
167             mMasterBuffer[i] = (short) sample;
168         }
169         bis.close();
170         masterFd.close();
171     }
172 
173     @After
174     @Override
tearDown()175     public void tearDown() {
176         // ensure MediaCodecPlayer resources are released even if an exception is thrown.
177         if (mMediaCodecPlayer != null) {
178             mMediaCodecPlayer.reset();
179             mMediaCodecPlayer = null;
180         }
181         super.tearDown();
182     }
183 
isDefaultCodec(String codecName, String mime)184     static boolean isDefaultCodec(String codecName, String mime) throws IOException {
185         if (sDefaultDecoders.containsKey(mime)) {
186             return sDefaultDecoders.get(mime).equalsIgnoreCase(codecName);
187         }
188         MediaCodec codec = MediaCodec.createDecoderByType(mime);
189         boolean isDefault = codec.getName().equalsIgnoreCase(codecName);
190         sDefaultDecoders.put(mime, codec.getName());
191         codec.release();
192 
193         return isDefault;
194     }
195 
196     @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
197     @Test
testBug11696552()198     public void testBug11696552() throws Exception {
199         MediaCodec mMediaCodec = MediaCodec.createDecoderByType(MediaFormat.MIMETYPE_AUDIO_AAC);
200         MediaFormat mFormat = MediaFormat.createAudioFormat(
201                 MediaFormat.MIMETYPE_AUDIO_AAC, 48000 /* frequency */, 2 /* channels */);
202         mFormat.setByteBuffer("csd-0", ByteBuffer.wrap( new byte [] {0x13, 0x10} ));
203         mFormat.setInteger(MediaFormat.KEY_IS_ADTS, 1);
204         mMediaCodec.configure(mFormat, null, null, 0);
205         mMediaCodec.start();
206         int index = mMediaCodec.dequeueInputBuffer(250000);
207         mMediaCodec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
208         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
209         mMediaCodec.dequeueOutputBuffer(info, 250000);
210     }
211 
    // methods decode(), testTimeStampOrdering(), monoTest(), ... internally call decodeToMemory
    // (). This method for a given media type and component, decodes the resource file. Majority
    // of the mediacodec api will get involved in this process. The ones that are involved are
    // listed below. The @ApiTest(...) annotation is only added for one test, but it applies at
    // all other places unless indicated otherwise. This is done to avoid redundancy.
    @ApiTest(apis = {"android.media.MediaExtractor#setDataSource",
            "android.media.MediaExtractor#getTrackCount",
            "android.media.MediaExtractor#getTrackFormat",
            "android.media.MediaExtractor#selectTrack",
            "android.media.MediaExtractor#getSampleTrackIndex",
            "android.media.MediaExtractor#readSampleData",
            "android.media.MediaExtractor#getSampleTime",
            "android.media.MediaExtractor#advance",
            "android.media.MediaExtractor#release",
            "android.media.MediaCodec#createByCodecName",
            "android.media.MediaCodec#configure",
            "android.media.MediaCodec#start",
            "android.media.MediaCodec#getInputBuffers",
            "android.media.MediaCodec#getOutputBuffers",
            "android.media.MediaCodec#dequeueInputBuffer",
            "android.media.MediaCodec#queueInputBuffer",
            "android.media.MediaCodec#dequeueOutputBuffer",
            "android.media.MediaCodec#releaseOutputBuffer",
            "android.media.MediaCodec#getOutputFormat",
            "android.media.MediaCodec#flush",
            "android.media.MediaCodec#stop",
            "android.media.MediaCodec#release",
            "android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM",
            "android.media.MediaCodec#BUFFER_FLAG_CODEC_CONFIG"})
    // The allowed errors in the following tests are the actual maximum measured
    // errors with the standard decoders, plus 10%.
    // This should allow for some variation in decoders, while still detecting
    // phase and delay errors, channel swap, etc.
    @CddTest(requirements = {"5.1.2/C-1-6"})
    @Test
    public void testDecodeMp3Lame() throws Exception {
        // 804.f is the error budget passed to decode() (see note above).
        decode("sinesweepmp3lame.mp3", 804.f);
        testTimeStampOrdering("sinesweepmp3lame.mp3");
    }
    // Each test below decodes a sinesweep asset against the reference signal
    // with the given error budget, then verifies output timestamps are
    // monotonically non-decreasing. Lossy formats carry a nonzero budget;
    // wav and flac variants use 0.0f.
    @CddTest(requirements = {"5.1.2/C-1-6"})
    @Test
    public void testDecodeMp3Smpb() throws Exception {
        decode("sinesweepmp3smpb.mp3", 413.f);
        testTimeStampOrdering("sinesweepmp3smpb.mp3");
    }
    @CddTest(requirements = {"5.1.2/C-1-1"})
    @Test
    public void testDecodeM4a() throws Exception {
        decode("sinesweepm4a.m4a", 124.f);
        testTimeStampOrdering("sinesweepm4a.m4a");
    }
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeOgg() throws Exception {
        decode("sinesweepogg.ogg", 168.f);
        testTimeStampOrdering("sinesweepogg.ogg");
    }
    // Same vorbis content carried in mkv and mp4 containers.
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeOggMkv() throws Exception {
        decode("sinesweepoggmkv.mkv", 168.f);
        testTimeStampOrdering("sinesweepoggmkv.mkv");
    }
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeOggMp4() throws Exception {
        decode("sinesweepoggmp4.mp4", 168.f);
        testTimeStampOrdering("sinesweepoggmp4.mp4");
    }
    @CddTest(requirements = {"5.1.2/C-1-9"})
    @Test
    public void testDecodeWav() throws Exception {
        decode("sinesweepwav.wav", 0.0f);
        testTimeStampOrdering("sinesweepwav.wav");
    }
    // 24-bit PCM variant.
    @CddTest(requirements = {"5.1.2/C-1-9"})
    @Test
    public void testDecodeWav24() throws Exception {
        decode("sinesweepwav24.wav", 0.0f);
        testTimeStampOrdering("sinesweepwav24.wav");
    }
    @CddTest(requirements = {"5.1.2/C-1-5"})
    @Test
    public void testDecodeFlacMkv() throws Exception {
        decode("sinesweepflacmkv.mkv", 0.0f);
        testTimeStampOrdering("sinesweepflacmkv.mkv");
    }
    @CddTest(requirements = {"5.1.2/C-1-5"})
    @Test
    public void testDecodeFlac() throws Exception {
        decode("sinesweepflac.flac", 0.0f);
        testTimeStampOrdering("sinesweepflac.flac");
    }
    @CddTest(requirements = {"5.1.2/C-1-5"})
    @Test
    public void testDecodeFlac24() throws Exception {
        decode("sinesweepflac24.flac", 0.0f);
        testTimeStampOrdering("sinesweepflac24.flac");
    }
    @CddTest(requirements = {"5.1.2/C-1-5"})
    @Test
    public void testDecodeFlacMp4() throws Exception {
        decode("sinesweepflacmp4.mp4", 0.0f);
        testTimeStampOrdering("sinesweepflacmp4.mp4");
    }
317 
    // Mono decode tests: monoTest() decodes a single-channel asset at the
    // given sample rate; timestamp ordering is verified separately.
    @CddTest(requirements = {"5.1.2/C-1-6"})
    @Test
    public void testDecodeMonoMp3() throws Exception {
        monoTest("monotestmp3.mp3", 44100);
        testTimeStampOrdering("monotestmp3.mp3");
    }

    @CddTest(requirements = {"5.1.2/C-1-1"})
    @Test
    public void testDecodeMonoM4a() throws Exception {
        monoTest("monotestm4a.m4a", 44100);
        testTimeStampOrdering("monotestm4a.m4a");
    }

    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeMonoOgg() throws Exception {
        monoTest("monotestogg.ogg", 44100);
        testTimeStampOrdering("monotestogg.ogg");
    }
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeMonoOggMkv() throws Exception {
        monoTest("monotestoggmkv.mkv", 44100);
        testTimeStampOrdering("monotestoggmkv.mkv");
    }
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeMonoOggMp4() throws Exception {
        monoTest("monotestoggmp4.mp4", 44100);
        testTimeStampOrdering("monotestoggmp4.mp4");
    }

    @Test
    public void testDecodeMonoGsm() throws Exception {
        String fileName = "monotestgsm.wav";
        Preconditions.assertTestFileExists(mInpPrefix + fileName);
        // GSM decoding is not mandatory: run the checks only when some codec
        // on the device can handle the resource, otherwise record a skip.
        if (MediaUtils.hasCodecsForResource(mInpPrefix + fileName)) {
            monoTest(fileName, 8000);
            testTimeStampOrdering(fileName);
        } else {
            MediaUtils.skipTest("not mandatory");
        }
    }
362 
    // Timestamp-ordering-only tests for formats/containers where a full
    // reference comparison is not performed.
    @CddTest(requirements = {"5.1.2/C-1-1"})
    @Test
    public void testDecodeAacTs() throws Exception {
        testTimeStampOrdering("sinesweeptsaac.m4a");
    }

    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeVorbis() throws Exception {
        testTimeStampOrdering("sinesweepvorbis.mkv");
    }
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeVorbisMp4() throws Exception {
        testTimeStampOrdering("sinesweepvorbismp4.mp4");
    }

    @CddTest(requirements = {"5.1.2/C-1-10"})
    @Test
    public void testDecodeOpus() throws Exception {
        testTimeStampOrdering("sinesweepopus.mkv");
    }
    @CddTest(requirements = {"5.1.2/C-1-10"})
    @Test
    public void testDecodeOpusMp4() throws Exception {
        testTimeStampOrdering("sinesweepopusmp4.mp4");
    }
390 
    // Verify decoders exist for, and can be configured with, every listed
    // combination of mime type, sample rate, and channel mask.
    @CddTest(requirements = {"5.1.3"})
    @Test
    public void testDecodeG711ChannelsAndRates() throws Exception {
        String[] mimetypes = { MediaFormat.MIMETYPE_AUDIO_G711_ALAW,
                               MediaFormat.MIMETYPE_AUDIO_G711_MLAW };
        int[] sampleRates = { 8000 };
        int[] channelMasks = { AudioFormat.CHANNEL_OUT_MONO,
                               AudioFormat.CHANNEL_OUT_STEREO,
                               AudioFormat.CHANNEL_OUT_5POINT1 };

        verifyChannelsAndRates(mimetypes, sampleRates, channelMasks);
    }

    @CddTest(requirements = {"5.1.3"})
    @Test
    public void testDecodeOpusChannelsAndRates() throws Exception {
        String[] mimetypes = { MediaFormat.MIMETYPE_AUDIO_OPUS };
        int[] sampleRates = { 8000, 12000, 16000, 24000, 48000 };
        int[] channelMasks = { AudioFormat.CHANNEL_OUT_MONO,
                               AudioFormat.CHANNEL_OUT_STEREO,
                               AudioFormat.CHANNEL_OUT_5POINT1 };

        verifyChannelsAndRates(mimetypes, sampleRates, channelMasks);
    }
415 
verifyChannelsAndRates(String[] mimetypes, int[] sampleRates, int[] channelMasks)416     private void verifyChannelsAndRates(String[] mimetypes, int[] sampleRates,
417                                        int[] channelMasks) throws Exception {
418 
419         if (!MediaUtils.check(IS_AT_LEAST_R, "test invalid before Android 11")) return;
420 
421         for (String mimetype : mimetypes) {
422             // ensure we find a codec for all listed mime/channel/rate combinations
423             MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
424             for (int sampleRate : sampleRates) {
425                 for (int channelMask : channelMasks) {
426                     int channelCount = AudioFormat.channelCountFromOutChannelMask(channelMask);
427                     MediaFormat desiredFormat = MediaFormat.createAudioFormat(
428                                 mimetype,
429                                 sampleRate,
430                                 channelCount);
431                     String codecname = mcl.findDecoderForFormat(desiredFormat);
432 
433                     assertNotNull("findDecoderForFormat() failed for mime=" + mimetype
434                                     + " sampleRate=" + sampleRate + " channelCount=" + channelCount,
435                             codecname);
436                 }
437             }
438 
439             // check all mime-matching codecs successfully configure the desired rate/channels
440             ArrayList<MediaCodecInfo> codecInfoList = getDecoderMediaCodecInfoList(mimetype);
441             if (codecInfoList == null) {
442                 continue;
443             }
444             for (MediaCodecInfo codecInfo : codecInfoList) {
445                 MediaCodec codec = MediaCodec.createByCodecName(codecInfo.getName());
446                 for (int sampleRate : sampleRates) {
447                     for (int channelMask : channelMasks) {
448                         int channelCount = AudioFormat.channelCountFromOutChannelMask(channelMask);
449 
450                         codec.reset();
451                         MediaFormat desiredFormat = MediaFormat.createAudioFormat(
452                                 mimetype,
453                                 sampleRate,
454                                 channelCount);
455                         codec.configure(desiredFormat, null, null, 0);
456                         codec.start();
457 
458                         Log.d(TAG, "codec: " + codecInfo.getName() +
459                                 " sample rate: " + sampleRate +
460                                 " channelcount:" + channelCount);
461 
462                         MediaFormat actual = codec.getInputFormat();
463                         int actualChannels = actual.getInteger(MediaFormat.KEY_CHANNEL_COUNT, -1);
464                         int actualSampleRate = actual.getInteger(MediaFormat.KEY_SAMPLE_RATE, -1);
465                         assertTrue("channels: configured " + actualChannels +
466                                    " != desired " + channelCount, actualChannels == channelCount);
467                         assertTrue("sample rate: configured " + actualSampleRate +
468                                    " != desired " + sampleRate, actualSampleRate == sampleRate);
469                     }
470                 }
471                 codec.release();
472             }
473         }
474     }
475 
getDecoderMediaCodecInfoList(String mimeType)476     private ArrayList<MediaCodecInfo> getDecoderMediaCodecInfoList(String mimeType) {
477         MediaCodecList mediaCodecList = new MediaCodecList(MediaCodecList.ALL_CODECS);
478         ArrayList<MediaCodecInfo> decoderInfos = new ArrayList<MediaCodecInfo>();
479         for (MediaCodecInfo codecInfo : mediaCodecList.getCodecInfos()) {
480             if (!codecInfo.isEncoder() && isMimeTypeSupported(codecInfo, mimeType)) {
481                 decoderInfos.add(codecInfo);
482             }
483         }
484         return decoderInfos;
485     }
486 
isMimeTypeSupported(MediaCodecInfo codecInfo, String mimeType)487     private boolean isMimeTypeSupported(MediaCodecInfo codecInfo, String mimeType) {
488         for (String type : codecInfo.getSupportedTypes()) {
489             if (type.equalsIgnoreCase(mimeType)) {
490                 return true;
491             }
492         }
493         return false;
494     }
495 
    @CddTest(requirements = {"5.1.2/C-1-1"})
    @Test
    public void testDecode51M4a() throws Exception {
        // Decode a 5.1-channel AAC asset to memory on every eligible codec,
        // with no reset/reconfigure and no timestamp collection.
        for (String codecName : codecsFor("sinesweep51m4a.m4a")) {
            decodeToMemory(codecName, "sinesweep51m4a.m4a", RESET_MODE_NONE, CONFIG_MODE_NONE, -1,
                    null);
        }
    }
504 
testTimeStampOrdering(final String res)505     private void testTimeStampOrdering(final String res) throws Exception {
506         for (String codecName : codecsFor(res)) {
507             List<Long> timestamps = new ArrayList<Long>();
508             decodeToMemory(codecName, res, RESET_MODE_NONE, CONFIG_MODE_NONE, -1, timestamps);
509             Long lastTime = Long.MIN_VALUE;
510             for (int i = 0; i < timestamps.size(); i++) {
511                 Long thisTime = timestamps.get(i);
512                 assertTrue(codecName + ": timetravel occurred: " + lastTime + " > " + thisTime,
513                        thisTime >= lastTime);
514                 lastTime = thisTime;
515             }
516         }
517     }
518 
    // Annotation applicable to other testTrackSelection*() as well
    @ApiTest(apis = {"android.media.MediaExtractor#setDataSource",
        "android.media.MediaExtractor#selectTrack",
        "android.media.MediaExtractor#readSampleData",
        "android.media.MediaExtractor#getSampleTrackIndex",
        "android.media.MediaExtractor#seekTo",
        "android.media.MediaExtractor#advance",
        "android.media.MediaExtractor#release"})
    @Test
    public void testTrackSelection() throws Exception {
        // Same content packaged three ways: progressive, fragmented, and DASH mp4.
        testTrackSelection("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4");
        testTrackSelection(
                "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_fragmented.mp4");
        testTrackSelection(
                "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4");
    }
535 
    @Test
    public void testTrackSelectionMkv() throws Exception {
        // Matroska containers with AVC video paired with various audio codecs.
        Log.d(TAG, "testTrackSelectionMkv!!!!!! ");
        testTrackSelection("mkv_avc_adpcm_ima.mkv");
        Log.d(TAG, "mkv_avc_adpcm_ima finished!!!!!! ");
        testTrackSelection("mkv_avc_adpcm_ms.mkv");
        Log.d(TAG, "mkv_avc_adpcm_ms finished!!!!!! ");
        testTrackSelection("mkv_avc_wma.mkv");
        Log.d(TAG, "mkv_avc_wma finished!!!!!! ");
        testTrackSelection("mkv_avc_mp2.mkv");
        Log.d(TAG, "mkv_avc_mp2 finished!!!!!! ");
    }
548 
549     @Test
testBFrames()550     public void testBFrames() throws Exception {
551         int testsRun =
552             testBFrames("video_h264_main_b_frames.mp4") +
553             testBFrames("video_h264_main_b_frames_frag.mp4");
554         if (testsRun == 0) {
555             MediaUtils.skipTest("no codec found");
556         }
557     }
558 
    /**
     * Decodes a video stream containing B-frames and checks that (a) the
     * extractor delivers at least one sample out of presentation order (the
     * stream really has B-frames) and (b) the decoder's output timestamps are
     * strictly increasing.
     *
     * @return 1 if the test ran, 0 if skipped because no codec can decode it
     */
    public int testBFrames(final String res) throws Exception {
        MediaExtractor ex = new MediaExtractor();
        Preconditions.assertTestFileExists(mInpPrefix + res);
        ex.setDataSource(mInpPrefix + res);
        MediaFormat format = ex.getTrackFormat(0);
        String mime = format.getString(MediaFormat.KEY_MIME);
        assertTrue("not a video track. Wrong test file?", mime.startsWith("video/"));
        if (!MediaUtils.canDecode(format)) {
            ex.release();
            return 0; // skip
        }
        MediaCodec dec = MediaCodec.createDecoderByType(mime);
        Surface s = getActivity().getSurfaceHolder().getSurface();
        dec.configure(format, s, null, 0);
        dec.start();
        ByteBuffer[] buf = dec.getInputBuffers();
        ex.selectTrack(0);
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        long lastPresentationTimeUsFromExtractor = -1;
        long lastPresentationTimeUsFromDecoder = -1;
        boolean inputoutoforder = false;
        while(true) {
            int flags = ex.getSampleFlags();
            long time = ex.getSampleTime();
            // Track whether extractor (decode-order) timestamps ever go
            // backwards; with B-frames they must at some point.
            if (time >= 0 && time < lastPresentationTimeUsFromExtractor) {
                inputoutoforder = true;
            }
            lastPresentationTimeUsFromExtractor = time;
            int bufidx = dec.dequeueInputBuffer(5000);
            if (bufidx >= 0) {
                int n = ex.readSampleData(buf[bufidx], 0);
                if (n < 0) {
                    // End of stream: queue an empty EOS buffer.
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                    time = 0;
                    n = 0;
                }
                dec.queueInputBuffer(bufidx, 0, n, time, flags);
                ex.advance();
            }
            int status = dec.dequeueOutputBuffer(info, 5000);
            if (status >= 0) {
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    break;
                }
                // Decoder output must be reordered into presentation order.
                assertTrue("out of order timestamp from decoder",
                        info.presentationTimeUs > lastPresentationTimeUsFromDecoder);
                dec.releaseOutputBuffer(status, true);
                lastPresentationTimeUsFromDecoder = info.presentationTimeUs;
            }
        }
        assertTrue("extractor timestamps were ordered, wrong test file?", inputoutoforder);
        dec.release();
        ex.release();
        return 1;
      }
614 
    /**
     * Test ColorAspects of all the AVC decoders. Decoders should handle
     * the colors aspects presented in both the mp4 atom 'colr' and VUI
     * in the bitstream correctly. The following table lists the color
     * aspects contained in the color box and VUI for the test stream.
     * P = primaries, T = transfer, M = coeffs, R = range. '-' means
     * empty value.
     *                                      |     colr     |    VUI
     * -------------------------------------------------------------------
     *         File Name                    |  P  T  M  R  |  P  T  M  R
     * -------------------------------------------------------------------
     *  color_176x144_bt709_lr_sdr_h264     |  1  1  1  0  |  -  -  -  -
     *  color_176x144_bt601_625_fr_sdr_h264 |  1  6  6  0  |  5  2  2  1
     *  color_176x144_bt601_525_lr_sdr_h264 |  6  5  4  0  |  2  6  6  0
     *  color_176x144_srgb_lr_sdr_h264      |  2  0  2  1  |  1  13 1  0
     */
    @ApiTest(apis = {"android.media.MediaFormat#KEY_COLOR_RANGE",
            "android.media.MediaFormat#KEY_COLOR_STANDARD",
            "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
    @Test
    public void testH264ColorAspects() throws Exception {
        testColorAspects(
                "color_176x144_bt709_lr_sdr_h264.mp4", 1 /* testId */,
                MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
                MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
        testColorAspects(
                "color_176x144_bt601_625_fr_sdr_h264.mp4", 2 /* testId */,
                MediaFormat.COLOR_RANGE_FULL, MediaFormat.COLOR_STANDARD_BT601_PAL,
                MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
        testColorAspects(
                "color_176x144_bt601_525_lr_sdr_h264.mp4", 3 /* testId */,
                MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
                MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
        // The SRGB transfer is passed as the literal 2; the symbolic constant
        // name is kept alongside it in the comment.
        testColorAspects(
                "color_176x144_srgb_lr_sdr_h264.mp4", 4 /* testId */,
                MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
                2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
    }
653 
654     /**
655      * Test ColorAspects of all the HEVC decoders. Decoders should handle
656      * the colors aspects presented in both the mp4 atom 'colr' and VUI
657      * in the bitstream correctly. The following table lists the color
658      * aspects contained in the color box and VUI for the test stream.
659      * P = primaries, T = transfer, M = coeffs, R = range. '-' means
660      * empty value.
661      *                                      |     colr     |    VUI
662      * -------------------------------------------------------------------
663      *         File Name                    |  P  T  M  R  |  P  T  M  R
664      * -------------------------------------------------------------------
665      *  color_176x144_bt709_lr_sdr_h265     |  1  1  1  0  |  -  -  -  -
666      *  color_176x144_bt601_625_fr_sdr_h265 |  1  6  6  0  |  5  2  2  1
667      *  color_176x144_bt601_525_lr_sdr_h265 |  6  5  4  0  |  2  6  6  0
668      *  color_176x144_srgb_lr_sdr_h265      |  2  0  2  1  |  1  13 1  0
669      */
670     @ApiTest(apis = {"android.media.MediaFormat#KEY_COLOR_RANGE",
671             "android.media.MediaFormat#KEY_COLOR_STANDARD",
672             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
673     @Test
testH265ColorAspects()674     public void testH265ColorAspects() throws Exception {
675         testColorAspects(
676                 "color_176x144_bt709_lr_sdr_h265.mp4", 1 /* testId */,
677                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
678                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
679         testColorAspects(
680                 "color_176x144_bt601_625_fr_sdr_h265.mp4", 2 /* testId */,
681                 MediaFormat.COLOR_RANGE_FULL, MediaFormat.COLOR_STANDARD_BT601_PAL,
682                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
683         testColorAspects(
684                 "color_176x144_bt601_525_lr_sdr_h265.mp4", 3 /* testId */,
685                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
686                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
687         testColorAspects(
688                 "color_176x144_srgb_lr_sdr_h265.mp4", 4 /* testId */,
689                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
690                 2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
691         // Test the main10 streams with surface as the decoder might
692         // support opaque buffers only.
693         testColorAspects(
694                 "color_176x144_bt2020_lr_smpte2084_h265.mp4", 5 /* testId */,
695                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT2020,
696                 MediaFormat.COLOR_TRANSFER_ST2084,
697                 getActivity().getSurfaceHolder().getSurface());
698         testColorAspects(
699                 "color_176x144_bt2020_lr_hlg_h265.mp4", 6 /* testId */,
700                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT2020,
701                 MediaFormat.COLOR_TRANSFER_HLG,
702                 getActivity().getSurfaceHolder().getSurface());
703     }
704 
705     /**
706      * Test ColorAspects of all the MPEG2 decoders if available. Decoders should
707      * handle the color aspects presented in both the mp4 atom 'colr' and Sequence
708      * in the bitstream correctly. The following table lists the color aspects
709      * contained in the color box and SeqInfo for the test stream.
710      * P = primaries, T = transfer, M = coeffs, R = range. '-' means
711      * empty value.
712      *                                       |     colr     |    SeqInfo
713      * -------------------------------------------------------------------
714      *         File Name                     |  P  T  M  R  |  P  T  M  R
715      * -------------------------------------------------------------------
716      *  color_176x144_bt709_lr_sdr_mpeg2     |  1  1  1  0  |  -  -  -  -
717      *  color_176x144_bt601_625_lr_sdr_mpeg2 |  1  6  6  0  |  5  2  2  0
718      *  color_176x144_bt601_525_lr_sdr_mpeg2 |  6  5  4  0  |  2  6  6  0
719      *  color_176x144_srgb_lr_sdr_mpeg2      |  2  0  2  0  |  1  13 1  0
720      */
721     @ApiTest(apis = {"android.media.MediaFormat#KEY_COLOR_RANGE",
722             "android.media.MediaFormat#KEY_COLOR_STANDARD",
723             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
724     @Test
testMPEG2ColorAspectsTV()725     public void testMPEG2ColorAspectsTV() throws Exception {
726         testColorAspects(
727                 "color_176x144_bt709_lr_sdr_mpeg2.mp4", 1 /* testId */,
728                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
729                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
730         testColorAspects(
731                 "color_176x144_bt601_625_lr_sdr_mpeg2.mp4", 2 /* testId */,
732                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_PAL,
733                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
734         testColorAspects(
735                 "color_176x144_bt601_525_lr_sdr_mpeg2.mp4", 3 /* testId */,
736                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT601_NTSC,
737                 MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
738         testColorAspects(
739                 "color_176x144_srgb_lr_sdr_mpeg2.mp4", 4 /* testId */,
740                 MediaFormat.COLOR_RANGE_LIMITED, MediaFormat.COLOR_STANDARD_BT709,
741                 2 /* MediaFormat.COLOR_TRANSFER_SRGB */);
742     }
743 
    /**
     * Convenience overload that runs the color-aspects check decoding to byte buffers
     * (no output surface); delegates to the Surface-taking overload with a null surface.
     */
    private void testColorAspects(
            final String res, int testId, int expectRange, int expectStandard, int expectTransfer)
            throws Exception {
        testColorAspects(
                res, testId, expectRange, expectStandard, expectTransfer, null /*surface*/);
    }
750 
testColorAspects( final String res, int testId, int expectRange, int expectStandard, int expectTransfer, Surface surface)751     private void testColorAspects(
752             final String res, int testId, int expectRange, int expectStandard, int expectTransfer,
753             Surface surface) throws Exception {
754         Preconditions.assertTestFileExists(mInpPrefix + res);
755         MediaFormat format = MediaUtils.getTrackFormatForResource(mInpPrefix + res, "video");
756         MediaFormat mimeFormat = new MediaFormat();
757         mimeFormat.setString(MediaFormat.KEY_MIME, format.getString(MediaFormat.KEY_MIME));
758 
759         for (String decoderName: MediaUtils.getDecoderNames(mimeFormat)) {
760             if (!MediaUtils.supports(decoderName, format)) {
761                 MediaUtils.skipTest(decoderName + " cannot play resource " + mInpPrefix + res);
762             } else {
763                 testColorAspects(decoderName, res, testId,
764                         expectRange, expectStandard, expectTransfer, surface);
765             }
766         }
767     }
768 
testColorAspects( String decoderName, final String res, int testId, int expectRange, int expectStandard, int expectTransfer, Surface surface)769     private void testColorAspects(
770             String decoderName, final String res, int testId, int expectRange,
771             int expectStandard, int expectTransfer, Surface surface) throws Exception {
772         Preconditions.assertTestFileExists(mInpPrefix + res);
773         MediaExtractor ex = new MediaExtractor();
774         ex.setDataSource(mInpPrefix + res);
775         MediaFormat format = ex.getTrackFormat(0);
776         MediaCodec dec = MediaCodec.createByCodecName(decoderName);
777         dec.configure(format, surface, null, 0);
778         dec.start();
779         ByteBuffer[] buf = dec.getInputBuffers();
780         ex.selectTrack(0);
781         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
782         boolean sawInputEOS = false;
783         boolean getOutputFormat = false;
784         boolean rangeMatch = false;
785         boolean colorMatch = false;
786         boolean transferMatch = false;
787         int colorRange = 0;
788         int colorStandard = 0;
789         int colorTransfer = 0;
790 
791         while (true) {
792             if (!sawInputEOS) {
793                 int flags = ex.getSampleFlags();
794                 long time = ex.getSampleTime();
795                 int bufidx = dec.dequeueInputBuffer(200 * 1000);
796                 if (bufidx >= 0) {
797                     int n = ex.readSampleData(buf[bufidx], 0);
798                     if (n < 0) {
799                         flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
800                         sawInputEOS = true;
801                         n = 0;
802                     }
803                     dec.queueInputBuffer(bufidx, 0, n, time, flags);
804                     ex.advance();
805                 } else {
806                     assertEquals(
807                             "codec.dequeueInputBuffer() unrecognized return value: " + bufidx,
808                             MediaCodec.INFO_TRY_AGAIN_LATER, bufidx);
809                 }
810             }
811 
812             int status = dec.dequeueOutputBuffer(info, sawInputEOS ? 3000 * 1000 : 100 * 1000);
813             if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
814                 MediaFormat fmt = dec.getOutputFormat();
815                 colorRange = fmt.containsKey("color-range") ? fmt.getInteger("color-range") : 0;
816                 colorStandard = fmt.containsKey("color-standard") ? fmt.getInteger("color-standard") : 0;
817                 colorTransfer = fmt.containsKey("color-transfer") ? fmt.getInteger("color-transfer") : 0;
818                 rangeMatch = colorRange == expectRange;
819                 colorMatch = colorStandard == expectStandard;
820                 transferMatch = colorTransfer == expectTransfer;
821                 getOutputFormat = true;
822                 // Test only needs to check the color format in the first format changed event.
823                 break;
824             } else if (status >= 0) {
825                 // Test should get at least one format changed event before getting first frame.
826                 assertTrue(getOutputFormat);
827                 break;
828             } else {
829                 assertFalse(
830                         "codec.dequeueOutputBuffer() timeout after seeing input EOS",
831                         status == MediaCodec.INFO_TRY_AGAIN_LATER && sawInputEOS);
832             }
833         }
834 
835         String reportName = decoderName + "_colorAspectsTest Test " + testId +
836                 " (Get R: " + colorRange + " S: " + colorStandard + " T: " + colorTransfer + ")" +
837                 " (Expect R: " + expectRange + " S: " + expectStandard + " T: " + expectTransfer + ")";
838         Log.d(TAG, reportName);
839 
840         DeviceReportLog log = new DeviceReportLog("CtsMediaDecoderTestCases", "color_aspects_test");
841         log.addValue("decoder_name", decoderName, ResultType.NEUTRAL, ResultUnit.NONE);
842         log.addValue("test_id", testId, ResultType.NEUTRAL, ResultUnit.NONE);
843         log.addValues(
844                 "rst_actual", new int[] { colorRange, colorStandard, colorTransfer },
845                 ResultType.NEUTRAL, ResultUnit.NONE);
846         log.addValues(
847                 "rst_expected", new int[] { expectRange, expectStandard, expectTransfer },
848                 ResultType.NEUTRAL, ResultUnit.NONE);
849 
850         if (rangeMatch && colorMatch && transferMatch) {
851             log.setSummary("result", 1, ResultType.HIGHER_BETTER, ResultUnit.COUNT);
852         } else {
853             log.setSummary("result", 0, ResultType.HIGHER_BETTER, ResultUnit.COUNT);
854         }
855         log.submit(getInstrumentation());
856 
857         assertTrue(rangeMatch && colorMatch && transferMatch);
858 
859         dec.release();
860         ex.release();
861     }
862 
    /**
     * Exercises MediaExtractor track selection on a file assumed to contain a video
     * track at index 0 and an audio track at index 1. The file is first scanned once
     * with both tracks selected to record the size of every sample per track; each
     * subsequent scenario (single track, switching tracks, adding a track, both
     * tracks, each with a rewind-and-repeat pass) must then reproduce exactly those
     * sample sizes in order.
     */
    private void testTrackSelection(final String res) throws Exception {
        MediaExtractor ex1 = new MediaExtractor();
        Preconditions.assertTestFileExists(mInpPrefix + res);
        try {
            ex1.setDataSource(mInpPrefix + res);

            ByteBuffer buf1 = ByteBuffer.allocate(1024*1024);
            ArrayList<Integer> vid = new ArrayList<Integer>();   // per-sample sizes, track 0
            ArrayList<Integer> aud = new ArrayList<Integer>();   // per-sample sizes, track 1

            // scan the file once and build lists of audio and video samples
            ex1.selectTrack(0);
            ex1.selectTrack(1);
            while(true) {
                int n1 = ex1.readSampleData(buf1, 0);
                if (n1 < 0) {
                    break;
                }
                int idx = ex1.getSampleTrackIndex();
                if (idx == 0) {
                    vid.add(n1);
                } else if (idx == 1) {
                    aud.add(n1);
                } else {
                    fail("unexpected track index: " + idx);
                }
                ex1.advance();
            }

            // read the video track once, then rewind and do it again, and
            // verify we get the right samples
            ex1.release();
            ex1 = new MediaExtractor();
            ex1.setDataSource(mInpPrefix + res);
            ex1.selectTrack(0);
            for (int i = 0; i < 2; i++) {
                ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
                int idx = 0;
                while(true) {
                    int n1 = ex1.readSampleData(buf1, 0);
                    if (n1 < 0) {
                        // at EOS we must have seen exactly the recorded number of samples
                        assertEquals(vid.size(), idx);
                        break;
                    }
                    assertEquals(vid.get(idx++).intValue(), n1);
                    ex1.advance();
                }
            }

            // read the audio track once, then rewind and do it again, and
            // verify we get the right samples
            ex1.release();
            ex1 = new MediaExtractor();
            ex1.setDataSource(mInpPrefix + res);
            ex1.selectTrack(1);
            for (int i = 0; i < 2; i++) {
                ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
                int idx = 0;
                while(true) {
                    int n1 = ex1.readSampleData(buf1, 0);
                    if (n1 < 0) {
                        assertEquals(aud.size(), idx);
                        break;
                    }
                    assertEquals(aud.get(idx++).intValue(), n1);
                    ex1.advance();
                }
            }

            // read the video track first, then rewind and get the audio track instead, and
            // verify we get the right samples
            ex1.release();
            ex1 = new MediaExtractor();
            ex1.setDataSource(mInpPrefix + res);
            for (int i = 0; i < 2; i++) {
                // iteration 0 reads only track 0 (video), iteration 1 only track 1 (audio)
                ex1.selectTrack(i);
                ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
                int idx = 0;
                while(true) {
                    int n1 = ex1.readSampleData(buf1, 0);
                    if (i == 0) {
                        if (n1 < 0) {
                            assertEquals(vid.size(), idx);
                            break;
                        }
                        assertEquals(vid.get(idx++).intValue(), n1);
                    } else if (i == 1) {
                        if (n1 < 0) {
                            assertEquals(aud.size(), idx);
                            break;
                        }
                        assertEquals(aud.get(idx++).intValue(), n1);
                    } else {
                        fail("unexpected track index: " + idx);
                    }
                    ex1.advance();
                }
                // deselect before switching so only one track is active at a time
                ex1.unselectTrack(i);
            }

            // read the video track first, then rewind, enable the audio track in addition
            // to the video track, and verify we get the right samples
            ex1.release();
            ex1 = new MediaExtractor();
            ex1.setDataSource(mInpPrefix + res);
            for (int i = 0; i < 2; i++) {
                // tracks accumulate: iteration 0 has track 0 selected, iteration 1 has both
                ex1.selectTrack(i);
                ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
                int vididx = 0;
                int audidx = 0;
                while(true) {
                    int n1 = ex1.readSampleData(buf1, 0);
                    if (n1 < 0) {
                        // we should have read all audio and all video samples at this point
                        assertEquals(vid.size(), vididx);
                        if (i == 1) {
                            assertEquals(aud.size(), audidx);
                        }
                        break;
                    }
                    int trackidx = ex1.getSampleTrackIndex();
                    if (trackidx == 0) {
                        assertEquals(vid.get(vididx++).intValue(), n1);
                    } else if (trackidx == 1) {
                        assertEquals(aud.get(audidx++).intValue(), n1);
                    } else {
                        fail("unexpected track index: " + trackidx);
                    }
                    ex1.advance();
                }
            }

            // read both tracks from the start, then rewind and verify we get the right
            // samples both times
            ex1.release();
            ex1 = new MediaExtractor();
            ex1.setDataSource(mInpPrefix + res);
            for (int i = 0; i < 2; i++) {
                // selecting an already-selected track again must be harmless
                ex1.selectTrack(0);
                ex1.selectTrack(1);
                ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
                int vididx = 0;
                int audidx = 0;
                while(true) {
                    int n1 = ex1.readSampleData(buf1, 0);
                    if (n1 < 0) {
                        // we should have read all audio and all video samples at this point
                        assertEquals(vid.size(), vididx);
                        assertEquals(aud.size(), audidx);
                        break;
                    }
                    int trackidx = ex1.getSampleTrackIndex();
                    if (trackidx == 0) {
                        assertEquals(vid.get(vididx++).intValue(), n1);
                    } else if (trackidx == 1) {
                        assertEquals(aud.get(audidx++).intValue(), n1);
                    } else {
                        fail("unexpected track index: " + trackidx);
                    }
                    ex1.advance();
                }
            }

        } finally {
            // releases whichever extractor instance is current (ex1 is re-created per scenario)
            if (ex1 != null) {
                ex1.release();
            }
        }
    }
1032 
1033     @ApiTest(apis = {"android.media.MediaExtractor#setDataSource",
1034         "android.media.MediaExtractor#selectTrack",
1035         "android.media.MediaExtractor#readSampleData",
1036         "android.media.MediaExtractor#seekTo",
1037         "android.media.MediaExtractor#advance"})
1038     @Test
testDecodeFragmented()1039     public void testDecodeFragmented() throws Exception {
1040         testDecodeFragmented("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4",
1041                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_fragmented.mp4");
1042         testDecodeFragmented("video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz.mp4",
1043                 "video_480x360_mp4_h264_1350kbps_30fps_aac_stereo_128kbps_44100hz_dash.mp4");
1044     }
1045 
testDecodeFragmented(final String reference, final String teststream)1046     private void testDecodeFragmented(final String reference, final String teststream)
1047             throws Exception {
1048         Preconditions.assertTestFileExists(mInpPrefix + reference);
1049         Preconditions.assertTestFileExists(mInpPrefix + teststream);
1050         try {
1051             MediaExtractor ex1 = new MediaExtractor();
1052             ex1.setDataSource(mInpPrefix + reference);
1053             MediaExtractor ex2 = new MediaExtractor();
1054             ex2.setDataSource(mInpPrefix + teststream);
1055 
1056             assertEquals("different track count", ex1.getTrackCount(), ex2.getTrackCount());
1057 
1058             ByteBuffer buf1 = ByteBuffer.allocate(1024*1024);
1059             ByteBuffer buf2 = ByteBuffer.allocate(1024*1024);
1060 
1061             for (int i = 0; i < ex1.getTrackCount(); i++) {
1062                 // note: this assumes the tracks are reported in the order in which they appear
1063                 // in the file.
1064                 ex1.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1065                 ex1.selectTrack(i);
1066                 ex2.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1067                 ex2.selectTrack(i);
1068 
1069                 while(true) {
1070                     int n1 = ex1.readSampleData(buf1, 0);
1071                     int n2 = ex2.readSampleData(buf2, 0);
1072                     assertEquals("different buffer size on track " + i, n1, n2);
1073 
1074                     if (n1 < 0) {
1075                         break;
1076                     }
1077                     // see bug 13008204
1078                     buf1.limit(n1);
1079                     buf2.limit(n2);
1080                     buf1.rewind();
1081                     buf2.rewind();
1082 
1083                     assertEquals("limit does not match return value on track " + i,
1084                             n1, buf1.limit());
1085                     assertEquals("limit does not match return value on track " + i,
1086                             n2, buf2.limit());
1087 
1088                     assertEquals("buffer data did not match on track " + i, buf1, buf2);
1089 
1090                     ex1.advance();
1091                     ex2.advance();
1092                 }
1093                 ex1.unselectTrack(i);
1094                 ex2.unselectTrack(i);
1095             }
1096         } catch (IOException e) {
1097             e.printStackTrace();
1098         }
1099     }
1100 
1101     /**
1102      * Verify correct decoding of MPEG-4 AAC-LC mono and stereo streams
1103      */
1104     @CddTest(requirements = {"5.1.2/C-1-1"})
1105     @Test
testDecodeAacLcM4a()1106     public void testDecodeAacLcM4a() throws Exception {
1107         // mono
1108         decodeNtest("sinesweep1_1ch_8khz_aot2_mp4.m4a", 40.f);
1109         decodeNtest("sinesweep1_1ch_11khz_aot2_mp4.m4a", 40.f);
1110         decodeNtest("sinesweep1_1ch_12khz_aot2_mp4.m4a", 40.f);
1111         decodeNtest("sinesweep1_1ch_16khz_aot2_mp4.m4a", 40.f);
1112         decodeNtest("sinesweep1_1ch_22khz_aot2_mp4.m4a", 40.f);
1113         decodeNtest("sinesweep1_1ch_24khz_aot2_mp4.m4a", 40.f);
1114         decodeNtest("sinesweep1_1ch_32khz_aot2_mp4.m4a", 40.f);
1115         decodeNtest("sinesweep1_1ch_44khz_aot2_mp4.m4a", 40.f);
1116         decodeNtest("sinesweep1_1ch_48khz_aot2_mp4.m4a", 40.f);
1117         // stereo
1118         decodeNtest("sinesweep_2ch_8khz_aot2_mp4.m4a", 40.f);
1119         decodeNtest("sinesweep_2ch_11khz_aot2_mp4.m4a", 40.f);
1120         decodeNtest("sinesweep_2ch_12khz_aot2_mp4.m4a", 40.f);
1121         decodeNtest("sinesweep_2ch_16khz_aot2_mp4.m4a", 40.f);
1122         decodeNtest("sinesweep_2ch_22khz_aot2_mp4.m4a", 40.f);
1123         decodeNtest("sinesweep_2ch_24khz_aot2_mp4.m4a", 40.f);
1124         decodeNtest("sinesweep_2ch_32khz_aot2_mp4.m4a", 40.f);
1125         decodeNtest("sinesweep_2ch_44khz_aot2_mp4.m4a", 40.f);
1126         decodeNtest("sinesweep_2ch_48khz_aot2_mp4.m4a", 40.f);
1127     }
1128 
1129     /**
1130      * Verify correct decoding of MPEG-4 AAC-LC 5.0 and 5.1 channel streams
1131      */
1132     @CddTest(requirements = {"5.1.2/C-1-1"})
1133     @Test
testDecodeAacLcMcM4a()1134     public void testDecodeAacLcMcM4a() throws Exception {
1135         for (String codecName : codecsFor("noise_6ch_48khz_aot2_mp4.m4a")) {
1136             AudioParameter decParams = new AudioParameter();
1137             short[] decSamples = decodeToMemory(codecName, decParams,
1138                     "noise_6ch_48khz_aot2_mp4.m4a", RESET_MODE_NONE,
1139                     CONFIG_MODE_NONE, -1, null);
1140             checkEnergy(decSamples, decParams, 6);
1141             decParams.reset();
1142 
1143             decSamples = decodeToMemory(codecName, decParams, "noise_5ch_44khz_aot2_mp4.m4a",
1144                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1145             checkEnergy(decSamples, decParams, 5);
1146             decParams.reset();
1147         }
1148     }
1149 
1150     /**
1151      * Verify correct decoding of MPEG-4 HE-AAC mono and stereo streams
1152      */
1153     @CddTest(requirements = {"5.1.2/C-1-2"})
1154     @Test
testDecodeHeAacM4a()1155     public void testDecodeHeAacM4a() throws Exception {
1156         Object [][] samples = {
1157                 //  {resource, numChannels},
1158                 {"noise_1ch_24khz_aot5_dr_sbr_sig1_mp4.m4a", 1},
1159                 {"noise_1ch_24khz_aot5_ds_sbr_sig1_mp4.m4a", 1},
1160                 {"noise_1ch_32khz_aot5_dr_sbr_sig2_mp4.m4a", 1},
1161                 {"noise_1ch_44khz_aot5_dr_sbr_sig0_mp4.m4a", 1},
1162                 {"noise_1ch_44khz_aot5_ds_sbr_sig2_mp4.m4a", 1},
1163                 {"noise_2ch_24khz_aot5_dr_sbr_sig2_mp4.m4a", 2},
1164                 {"noise_2ch_32khz_aot5_ds_sbr_sig2_mp4.m4a", 2},
1165                 {"noise_2ch_48khz_aot5_dr_sbr_sig1_mp4.m4a", 2},
1166                 {"noise_2ch_48khz_aot5_ds_sbr_sig1_mp4.m4a", 2},
1167         };
1168 
1169         for (Object [] sample: samples) {
1170             for (String codecName : codecsFor((String)sample[0], CODEC_DEFAULT)) {
1171                 AudioParameter decParams = new AudioParameter();
1172                 short[] decSamples = decodeToMemory(codecName, decParams,
1173                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1174                         -1, null);
1175                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1176                 decParams.reset();
1177             }
1178         }
1179     }
1180 
1181     /**
1182      * Verify correct decoding of MPEG-4 HE-AAC 5.0 and 5.1 channel streams
1183      */
1184     @CddTest(requirements = {"5.1.2/C-1-2"})
1185     @Test
testDecodeHeAacMcM4a()1186     public void testDecodeHeAacMcM4a() throws Exception {
1187         Object [][] samples = {
1188                 //  {resource, numChannels},
1189                 {"noise_5ch_48khz_aot5_dr_sbr_sig1_mp4.m4a", 5},
1190                 {"noise_6ch_44khz_aot5_dr_sbr_sig2_mp4.m4a", 6},
1191         };
1192         for (Object [] sample: samples) {
1193             for (String codecName : codecsFor((String)sample[0] /* resource */, CODEC_DEFAULT)) {
1194                 AudioParameter decParams = new AudioParameter();
1195                 short[] decSamples = decodeToMemory(codecName, decParams,
1196                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1197                         -1, null);
1198                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1199                 decParams.reset();
1200             }
1201         }
1202     }
1203 
1204     /**
1205      * Verify correct decoding of MPEG-4 HE-AAC v2 stereo streams
1206      */
1207     @CddTest(requirements = {"5.1.2/C-1-3"})
1208     @Test
testDecodeHeAacV2M4a()1209     public void testDecodeHeAacV2M4a() throws Exception {
1210         String [] samples = {
1211                 "noise_2ch_24khz_aot29_dr_sbr_sig0_mp4.m4a",
1212                 "noise_2ch_44khz_aot29_dr_sbr_sig1_mp4.m4a",
1213                 "noise_2ch_48khz_aot29_dr_sbr_sig2_mp4.m4a"
1214         };
1215         for (String sample: samples) {
1216             for (String codecName : codecsFor(sample, CODEC_DEFAULT)) {
1217                 AudioParameter decParams = new AudioParameter();
1218                 short[] decSamples = decodeToMemory(codecName, decParams, sample,
1219                         RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1220                 checkEnergy(decSamples, decParams, 2);
1221             }
1222         }
1223     }
1224 
1225     /**
1226      * Verify correct decoding of MPEG-4 AAC-ELD mono and stereo streams
1227      */
1228     @CddTest(requirements = {"5.1.2/C-1-4"})
1229     @Test
testDecodeAacEldM4a()1230     public void testDecodeAacEldM4a() throws Exception {
1231         // mono
1232         decodeNtest("sinesweep1_1ch_16khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1233         decodeNtest("sinesweep1_1ch_22khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1234         decodeNtest("sinesweep1_1ch_24khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1235         decodeNtest("sinesweep1_1ch_32khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1236         decodeNtest("sinesweep1_1ch_44khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1237         decodeNtest("sinesweep1_1ch_48khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1238 
1239         // stereo
1240         decodeNtest("sinesweep_2ch_16khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1241         decodeNtest("sinesweep_2ch_22khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1242         decodeNtest("sinesweep_2ch_24khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1243         decodeNtest("sinesweep_2ch_32khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1244         decodeNtest("sinesweep_2ch_44khz_aot39_fl512_mp4.m4a", 40.f, CODEC_DEFAULT);
1245         decodeNtest("sinesweep_2ch_48khz_aot39_fl480_mp4.m4a", 40.f, CODEC_DEFAULT);
1246 
1247         AudioParameter decParams = new AudioParameter();
1248 
1249         Object [][] samples = {
1250                 //  {resource, numChannels},
1251                 {"noise_1ch_16khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1252                 {"noise_1ch_24khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1253                 {"noise_1ch_32khz_aot39_dr_sbr_fl480_mp4.m4a", 1},
1254                 {"noise_1ch_44khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1255                 {"noise_1ch_44khz_aot39_ds_sbr_fl512_mp4.m4a", 1},
1256                 {"noise_1ch_48khz_aot39_dr_sbr_fl480_mp4.m4a", 1},
1257                 {"noise_2ch_22khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1258                 {"noise_2ch_32khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1259                 {"noise_2ch_44khz_aot39_dr_sbr_fl480_mp4.m4a", 2},
1260                 {"noise_2ch_48khz_aot39_ds_sbr_fl512_mp4.m4a", 2},
1261         };
1262         for (Object [] sample: samples) {
1263             for (String codecName : codecsFor((String)sample[0], CODEC_DEFAULT)) {
1264                 short[] decSamples = decodeToMemory(codecName, decParams,
1265                         (String)sample[0] /* resource */, RESET_MODE_NONE, CONFIG_MODE_NONE,
1266                         -1, null);
1267                 checkEnergy(decSamples, decParams, (Integer)sample[1] /* number of channels */);
1268                 decParams.reset();
1269             }
1270         }
1271     }
1272 
1273     /**
1274      * Perform a segmented energy analysis on given audio signal samples and run several tests on
1275      * the energy values.
1276      *
1277      * The main purpose is to verify whether an AAC decoder implementation applies Spectral Band
1278      * Replication (SBR) and Parametric Stereo (PS) correctly. Both tools are inherent parts to the
1279      * MPEG-4 HE-AAC and HE-AAC v2 audio codecs.
1280      *
1281      * In addition, this test can verify the correct decoding of multi-channel (e.g. 5.1 channel)
1282      * streams or the creation of a mixdown signal.
1283      *
1284      * Note: This test procedure is not an MPEG Conformance Test and can not serve as a replacement.
1285      *
1286      * @param decSamples the decoded audio samples to be tested
1287      * @param decParams the audio parameters of the given audio samples (decSamples)
1288      * @param encNch the encoded number of audio channels (number of channels of the original
1289      *               input)
1290      * @param nrgRatioThresh threshold to classify the energy ratios ]0.0, 1.0[
1291      * @throws RuntimeException
1292      */
checkEnergy(short[] decSamples, AudioParameter decParams, int encNch, float nrgRatioThresh)1293     protected void checkEnergy(short[] decSamples, AudioParameter decParams, int encNch,
1294                              float nrgRatioThresh) throws RuntimeException
1295     {
1296         final int nSegPerBlk = 4;                          // the number of segments per block
1297         final int nCh = decParams.getNumChannels();        // the number of input channels
1298         final int nBlkSmp = decParams.getSamplingRate();   // length of one (LB/HB) block [samples]
1299         final int nSegSmp = nBlkSmp / nSegPerBlk;          // length of one segment [samples]
1300         final int smplPerChan = decSamples.length / nCh;   // actual # samples per channel (total)
1301 
1302         final int nSegSmpTot = nSegSmp * nCh;              // actual # samples per segment (all ch)
1303         final int nSegChOffst = 2 * nSegPerBlk;            // signal offset between chans [segments]
1304         final int procNch = Math.min(nCh, encNch);         // the number of channels to be analyzed
1305         if (encNch > 4) {
1306             assertTrue(String.format("multichannel content (%dch) was downmixed (%dch)",
1307                     encNch, nCh), procNch > 4);
1308         }
1309         assertTrue(String.format("got less channels(%d) than encoded (%d)", nCh, encNch),
1310                 nCh >= encNch);
1311 
1312         final int encEffNch = (encNch > 5) ? encNch-1 : encNch;  // all original configs with more
1313                                                            // ... than five channel have an LFE */
1314         final int expSmplPerChan = Math.max(encEffNch, 2) * nSegChOffst * nSegSmp;
1315         final boolean isDmx = nCh < encNch;                // flag telling that input is dmx signal
1316         int effProcNch = procNch;                          // the num analyzed channels with signal
1317 
1318         assertTrue("got less input samples than expected", smplPerChan >= expSmplPerChan);
1319 
1320         // get the signal offset by counting zero samples at the very beginning (over all channels)
1321         final int zeroSigThresh = 1;                     // sample value threshold for signal search
1322         int signalStart = smplPerChan;                   // receives the number of samples that
1323                                                          // ... are in front of the actual signal
1324         int noiseStart = signalStart;                    // receives the number of null samples
1325                                                          // ... (per chan) at the very beginning
1326         for (int smpl = 0; smpl < decSamples.length; smpl++) {
1327             int value = Math.abs(decSamples[smpl]);
1328             if (value > 0 && noiseStart == signalStart) {
1329                 noiseStart = smpl / nCh;                   // store start of prepended noise
1330             }                                              // ... (can be same as signalStart)
1331             if (value > zeroSigThresh) {
1332                 signalStart = smpl / nCh;                  // store signal start offset [samples]
1333                 break;
1334             }
1335         }
1336         signalStart = (signalStart > noiseStart+1) ? signalStart : noiseStart;
1337         assertTrue ("no signal found in any channel!", signalStart < smplPerChan);
1338         final int totSeg = (smplPerChan-signalStart) / nSegSmp; // max num seg that fit into signal
1339         final int totSmp = nSegSmp * totSeg;               // max num relevant samples (per channel)
1340         assertTrue("no segments left to test after signal search", totSeg > 0);
1341 
1342         // get the energies and the channel offsets by searching for the first segment above the
1343         //  energy threshold
1344         final double zeroMaxNrgRatio = 0.001f;             // ratio of zeroNrgThresh to the max nrg
1345         double zeroNrgThresh = nSegSmp * nSegSmp;          // threshold to classify segment energies
1346         double totMaxNrg = 0.0f;                           // will store the max seg nrg over all ch
1347         double[][] nrg = new double[procNch][totSeg];      // array receiving the segment energies
1348         int[] offset = new int[procNch];                   // array for channel offsets
1349         boolean[] sigSeg = new boolean[totSeg];            // array receiving the segment ...
1350                                                            // ... energy status over all channels
1351         for (int ch = 0; ch < procNch; ch++) {
1352             offset[ch] = -1;
1353             for (int seg = 0; seg < totSeg; seg++) {
1354                 final int smpStart = (signalStart * nCh) + (seg * nSegSmpTot) + ch;
1355                 final int smpStop = smpStart + nSegSmpTot;
1356                 for (int smpl = smpStart; smpl < smpStop; smpl += nCh) {
1357                     nrg[ch][seg] += decSamples[smpl] * decSamples[smpl];  // accumulate segment nrg
1358                 }
1359                 if (nrg[ch][seg] > zeroNrgThresh && offset[ch] < 0) { // store 1st segment (index)
1360                     offset[ch] = seg / nSegChOffst;        // ... per ch which has energy above the
1361                 }                                          // ... threshold to get the ch offsets
1362                 if (nrg[ch][seg] > totMaxNrg) {
1363                     totMaxNrg = nrg[ch][seg];              // store the max segment nrg over all ch
1364                 }
1365                 sigSeg[seg] |= nrg[ch][seg] > zeroNrgThresh;  // store whether the channel has
1366                                                            // ... energy in this segment
1367             }
1368             if (offset[ch] < 0) {                          // if one channel has no signal it is
1369                 effProcNch -= 1;                           // ... most probably the LFE
1370                 offset[ch] = effProcNch;                   // the LFE is no effective channel
1371             }
1372             if (ch == 0) {                                 // recalculate the zero signal threshold
1373                 zeroNrgThresh = zeroMaxNrgRatio * totMaxNrg; // ... based on the 1st channels max
1374             }                                              // ... energy for all subsequent checks
1375         }
1376         // check the channel mapping
1377         assertTrue("more than one LFE detected", effProcNch >= procNch - 1);
1378         assertTrue(String.format("less samples decoded than expected: %d < %d",
1379                 decSamples.length-(signalStart * nCh), totSmp * effProcNch),
1380                 decSamples.length-(signalStart * nCh) >= totSmp * effProcNch);
1381         if (procNch >= 5) {                                // for multi-channel signals the only
1382             final int[] frontChMap1 = {2, 0, 1};           // valid front channel orders are L, R, C
1383             final int[] frontChMap2 = {0, 1, 2};           // or C, L, R (L=left, R=right, C=center)
1384             if ( !(Arrays.equals(Arrays.copyOfRange(offset, 0, 3), frontChMap1)
1385                     || Arrays.equals(Arrays.copyOfRange(offset, 0, 3), frontChMap2)) ) {
1386                 fail("wrong front channel mapping");
1387             }
1388         }
1389         // check whether every channel occurs exactly once
1390         int[] chMap = new int[nCh];                        // mapping array to sort channels
1391         for (int ch = 0; ch < effProcNch; ch++) {
1392             int occurred = 0;
1393             for (int idx = 0; idx < procNch; idx++) {
1394                 if (offset[idx] == ch) {
1395                     occurred += 1;
1396                     chMap[ch] = idx;                       // create mapping table to address chans
1397                 }                                          // ... from front to back
1398             }                                              // the LFE must be last
1399             assertTrue(String.format("channel %d occurs %d times in the mapping", ch, occurred),
1400                     occurred == 1);
1401         }
1402 
1403         // go over all segment energies in all channels and check them
1404         double refMinNrg = zeroNrgThresh;                  // reference min energy for the 1st ch;
1405                                                            // others will be compared against 1st
1406         for (int ch = 0; ch < procNch; ch++) {
1407             int idx = chMap[ch];                           // resolve channel mapping
1408             final int ofst = offset[idx] * nSegChOffst;    // signal offset [segments]
1409             if (ch < effProcNch && ofst < totSeg) {
1410                 int nrgSegEnd;                             // the last segment that has energy
1411                 int nrgSeg;                                // the number of segments with energy
1412                 if ((encNch <= 2) && (ch == 0)) {          // the first channel of a mono or ...
1413                     nrgSeg = totSeg;                       // stereo signal has full signal ...
1414                 } else {                                   // all others have one LB + one HB block
1415                     nrgSeg = Math.min(totSeg, (2 * nSegPerBlk) + ofst) - ofst;
1416                 }
1417                 nrgSegEnd = ofst + nrgSeg;
1418                 // find min and max energy of all segments that should have signal
1419                 double minNrg = nrg[idx][ofst];            // channels minimum segment energy
1420                 double maxNrg = nrg[idx][ofst];            // channels maximum segment energy
1421                 for (int seg = ofst+1; seg < nrgSegEnd; seg++) {          // values of 1st segment
1422                     if (nrg[idx][seg] < minNrg) minNrg = nrg[idx][seg];   // ... already assigned
1423                     if (nrg[idx][seg] > maxNrg) maxNrg = nrg[idx][seg];
1424                 }
1425                 assertTrue(String.format("max energy of channel %d is zero", ch),
1426                         maxNrg > 0.0f);
1427                 assertTrue(String.format("channel %d has not enough energy", ch),
1428                         minNrg >= refMinNrg);              // check the channels minimum energy
1429                 if (ch == 0) {                             // use 85% of 1st channels min energy as
1430                     refMinNrg = minNrg * 0.85f;            // ... reference the other chs must meet
1431                 } else if (isDmx && (ch == 1)) {           // in case of mixdown signal the energy
1432                     refMinNrg *= 0.50f;                    // ... can be lower depending on the
1433                 }                                          // ... downmix equation
1434                 // calculate and check the energy ratio
1435                 final double nrgRatio = minNrg / maxNrg;
1436                 assertTrue(String.format("energy ratio of channel %d below threshold", ch),
1437                         nrgRatio >= nrgRatioThresh);
1438                 if (!isDmx) {
1439                     if (nrgSegEnd < totSeg) {
1440                         // consider that some noise can extend into the subsequent segment
1441                         // allow this to be at max 20% of the channels minimum energy
1442                         assertTrue(String.format("min energy after noise above threshold (%.2f)",
1443                                 nrg[idx][nrgSegEnd]),
1444                                 nrg[idx][nrgSegEnd] < minNrg * 0.20f);
1445                         nrgSegEnd += 1;
1446                     }
1447                 } else {                                   // ignore all subsequent segments
1448                     nrgSegEnd = totSeg;                    // ... in case of a mixdown signal
1449                 }
1450                 // zero-out the verified energies to simplify the subsequent check
1451                 for (int seg = ofst; seg < nrgSegEnd; seg++) nrg[idx][seg] = 0.0f;
1452             }
1453             // check zero signal parts
1454             for (int seg = 0; seg < totSeg; seg++) {
1455                 assertTrue(String.format("segment %d in channel %d has signal where should " +
1456                         "be none (%.2f)", seg, ch, nrg[idx][seg]), nrg[idx][seg] < zeroNrgThresh);
1457             }
1458         }
1459         // test whether each segment has energy in at least one channel
1460         for (int seg = 0; seg < totSeg; seg++) {
1461             assertTrue(String.format("no channel has energy in segment %d", seg), sigSeg[seg]);
1462         }
1463     }
1464 
1465     private void checkEnergy(short[] decSamples, AudioParameter decParams, int encNch)
1466             throws RuntimeException {
1467         checkEnergy(decSamples, decParams, encNch, 0.50f);  // default energy ratio threshold: 0.50
1468     }
1469 
1470     /**
1471      * Calculate the RMS of the difference signal between a given signal and the reference samples
1472      * located in mMasterBuffer.
1473      * @param signal the decoded samples to test
1474      * @return RMS of error signal
1475      * @throws RuntimeException
1476      */
1477     private double getRmsError(short[] signal) throws RuntimeException {
1478         long totalErrorSquared = 0;
1479         int stride = mMasterBuffer.length / signal.length;
1480         assertEquals("wrong data size", mMasterBuffer.length, signal.length * stride);
1481 
1482         for (int i = 0; i < signal.length; i++) {
1483             short sample = signal[i];
1484             short mastersample = mMasterBuffer[i * stride];
1485             int d = sample - mastersample;
1486             totalErrorSquared += d * d;
1487         }
1488         long avgErrorSquared = (totalErrorSquared / signal.length);
1489         return Math.sqrt(avgErrorSquared);
1490     }
1491 
1492     /**
1493      * Decode a given input stream and compare the output against the reference signal. The RMS of
1494      * the error signal must be below the given threshold (maxerror).
1495      * Important note about the test signals: this method expects test signals to have been
1496      *   "stretched" relative to the reference signal. The reference, sinesweepraw, is 3s long at
1497      *   44100Hz. For instance for comparing this reference to a test signal at 8000Hz, the test
1498      *   signal needs to be 44100/8000 = 5.5125 times longer, containing frequencies 5.5125
1499      *   times lower than the reference.
1500      * @param testinput the file to decode
1501      * @param maxerror  the maximum allowed root mean squared error
1502      * @throws Exception
1503      */
1504     private void decodeNtest(final String testinput, float maxerror) throws Exception {
1505         decodeNtest(testinput, maxerror, CODEC_ALL);
1506     }
1507 
1508     private void decodeNtest(final String testinput, float maxerror, int codecSupportMode)
1509             throws Exception {
1510         String localTag = TAG + "#decodeNtest";
1511 
1512         for (String codecName: codecsFor(testinput, codecSupportMode)) {
1513             AudioParameter decParams = new AudioParameter();
1514             short[] decoded = decodeToMemory(codecName, decParams, testinput,
1515                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1516             double rmse = getRmsError(decoded);
1517 
1518             assertTrue(codecName + ": decoding error too big: " + rmse, rmse <= maxerror);
1519             Log.v(localTag, String.format("rms = %f (max = %f)", rmse, maxerror));
1520         }
1521     }
1522 
1523     private void monoTest(final String res, int expectedLength) throws Exception {
1524         for (String codecName: codecsFor(res)) {
1525             short [] mono = decodeToMemory(codecName, res,
1526                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1527             if (mono.length == expectedLength) {
1528                 // expected
1529             } else if (mono.length == expectedLength * 2) {
1530                 // the decoder output 2 channels instead of 1, check that the left and right channel
1531                 // are identical
1532                 for (int i = 0; i < mono.length; i += 2) {
1533                     assertEquals(codecName + ": mismatched samples at " + i, mono[i], mono[i+1]);
1534                 }
1535             } else {
1536                 fail(codecName + ": wrong number of samples: " + mono.length);
1537             }
1538 
1539             short [] mono2 = decodeToMemory(codecName, res,
1540                     RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, -1, null);
1541 
1542             assertEquals(codecName + ": count different after reconfigure: ",
1543                     mono.length, mono2.length);
1544             for (int i = 0; i < mono.length; i++) {
1545                 assertEquals(codecName + ": samples at " + i + " don't match", mono[i], mono2[i]);
1546             }
1547 
1548             short [] mono3 = decodeToMemory(codecName, res,
1549                     RESET_MODE_FLUSH, CONFIG_MODE_NONE, -1, null);
1550 
1551             assertEquals(codecName + ": count different after flush: ", mono.length, mono3.length);
1552             for (int i = 0; i < mono.length; i++) {
1553                 assertEquals(codecName + ": samples at " + i + " don't match", mono[i], mono3[i]);
1554             }
1555         }
1556     }
1557 
    /**
     * Return the names of all testable decoders for the given resource, considering every codec
     * that lists the media type (CODEC_ALL), not only the defaults.
     *
     * @param resource asset file name of the input clip
     * @return list of codec names testable in the current mode
     * @throws IOException if the resource cannot be opened
     */
    protected static List<String> codecsFor(String resource) throws IOException {
        return codecsFor(resource, CODEC_ALL);
    }
1561 
1562     protected static List<String> codecsFor(String resource, int codecSupportMode)
1563             throws IOException {
1564 
1565         // CODEC_DEFAULT behaviors started with S
1566         if (IS_BEFORE_S) {
1567             codecSupportMode = CODEC_ALL;
1568         }
1569         MediaExtractor ex = new MediaExtractor();
1570         AssetFileDescriptor fd = getAssetFileDescriptorFor(resource);
1571         try {
1572             ex.setDataSource(fd.getFileDescriptor(), fd.getStartOffset(), fd.getLength());
1573         } finally {
1574             fd.close();
1575         }
1576         MediaCodecInfo[] codecInfos = new MediaCodecList(
1577                 MediaCodecList.REGULAR_CODECS).getCodecInfos();
1578         ArrayList<String> matchingCodecs = new ArrayList<String>();
1579         MediaFormat format = ex.getTrackFormat(0);
1580         String mime = format.getString(MediaFormat.KEY_MIME);
1581         for (MediaCodecInfo info: codecInfos) {
1582             if (info.isEncoder()) {
1583                 continue;
1584             }
1585             try {
1586                 MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
1587                 if (caps != null) {
1588                     if (codecSupportMode == CODEC_ALL) {
1589                         if (IS_AFTER_T) {
1590                             // This is an extractor failure as often as it is a codec failure
1591                             assertTrue(info.getName() + " does not declare support for "
1592                                     + format.toString(),
1593                                     caps.isFormatSupported(format));
1594                         }
1595                         matchingCodecs.add(info.getName());
1596                     } else if (codecSupportMode == CODEC_DEFAULT) {
1597                         if (caps.isFormatSupported(format)) {
1598                             matchingCodecs.add(info.getName());
1599                         } else if (isDefaultCodec(info.getName(), mime)) {
1600                             // This is an extractor failure as often as it is a codec failure
1601                             fail(info.getName() + " which is a default decoder for mime " + mime
1602                                    + ", does not declare support for " + format.toString());
1603                         }
1604                     } else {
1605                         fail("Unhandled codec support mode " + codecSupportMode);
1606                     }
1607                 }
1608             } catch (IllegalArgumentException e) {
1609                 // type is not supported by this codec
1610             }
1611         }
1612 
1613         // CTS rules say the device must have a codec that supports this mediatype.
1614         assertTrue("no codecs found for format " + format.toString(),
1615                         matchingCodecs.size() != 0);
1616         // but we only test the ones appropriate to this mode;
1617         // testing all matching codecs requires us to run both CtsMediaXXX and MctsMediaXXX
1618         ArrayList<String> usingCodecs = new ArrayList<String>();
1619         for (String codecName : matchingCodecs) {
1620             if (!TestUtils.isTestableCodecInCurrentMode(codecName)) {
1621                 Log.i(TAG, "skip codec " + codecName + " in current mode");
1622                 continue;
1623             }
1624             usingCodecs.add(codecName);
1625         }
1626         // which may be empty, triggering a non-fatal assumption failure
1627         Assume.assumeTrue("no testable codecs for format " + format.toString()
1628                           + " in test mode " + TestUtils.currentTestModeName(),
1629                           usingCodecs.size() != 0);
1630         return usingCodecs;
1631     }
1632 
1633     /**
1634      * @param testinput the file to decode
1635      * @param maxerror the maximum allowed root mean squared error
1636      * @throws IOException
1637      */
1638     private void decode(final String testinput, float maxerror) throws IOException {
1639 
1640         for (String codecName: codecsFor(testinput)) {
1641             short[] decoded = decodeToMemory(codecName, testinput,
1642                     RESET_MODE_NONE, CONFIG_MODE_NONE, -1, null);
1643 
1644             assertEquals(codecName + ": wrong data size", mMasterBuffer.length, decoded.length);
1645 
1646             double rmse = getRmsError(decoded);
1647 
1648             assertTrue(codecName + ": decoding error too big: " + rmse, rmse <= maxerror);
1649 
1650             int[] resetModes = new int[] { RESET_MODE_NONE, RESET_MODE_RECONFIGURE,
1651                     RESET_MODE_FLUSH, RESET_MODE_EOS_FLUSH };
1652             int[] configModes = new int[] { CONFIG_MODE_NONE, CONFIG_MODE_QUEUE };
1653 
1654             for (int conf : configModes) {
1655                 for (int reset : resetModes) {
1656                     if (conf == CONFIG_MODE_NONE && reset == RESET_MODE_NONE) {
1657                         // default case done outside of loop
1658                         continue;
1659                     }
1660                     if (conf == CONFIG_MODE_QUEUE && !hasAudioCsd(testinput)) {
1661                         continue;
1662                     }
1663 
1664                     String params = String.format("(using reset: %d, config: %s)", reset, conf);
1665                     short[] decoded2 = decodeToMemory(codecName, testinput, reset, conf, -1, null);
1666                     assertEquals(codecName + ": count different with reconfigure" + params,
1667                             decoded.length, decoded2.length);
1668                     for (int i = 0; i < decoded.length; i++) {
1669                         assertEquals(codecName + ": samples don't match" + params,
1670                                 decoded[i], decoded2[i]);
1671                     }
1672                 }
1673             }
1674         }
1675     }
1676 
1677     private boolean hasAudioCsd(final String testinput) throws IOException {
1678         AssetFileDescriptor fd = null;
1679         try {
1680             MediaExtractor extractor = new MediaExtractor();
1681             extractor.setDataSource(mInpPrefix + testinput);
1682             MediaFormat format = extractor.getTrackFormat(0);
1683 
1684             return format.containsKey(CSD_KEYS[0]);
1685 
1686         } finally {
1687             if (fd != null) {
1688                 fd.close();
1689             }
1690         }
1691     }
1692 
1693     protected static int getOutputFormatInteger(MediaCodec codec, String key) {
1694         if (codec == null) {
1695             fail("Null MediaCodec before attempting to retrieve output format key " + key);
1696         }
1697         MediaFormat format = null;
1698         try {
1699             format = codec.getOutputFormat();
1700         } catch (Exception e) {
1701             fail("Exception " + e + " when attempting to obtain output format");
1702         }
1703         if (format == null) {
1704             fail("Null output format returned from MediaCodec");
1705         }
1706         try {
1707             return format.getInteger(key);
1708         } catch (NullPointerException e) {
1709             fail("Key " + key + " not present in output format");
1710         } catch (ClassCastException e) {
1711             fail("Key " + key + " not stored as integer in output format");
1712         } catch (Exception e) {
1713             fail("Exception " + e + " when attempting to retrieve output format key " + key);
1714         }
1715         // never used
1716         return Integer.MIN_VALUE;
1717     }
1718 
1719     // Class handling all audio parameters relevant for testing
1720     protected static class AudioParameter {
1721 
1722         public AudioParameter() {
1723             reset();
1724         }
1725 
1726         public void reset() {
1727             mNumChannels = 0;
1728             mSamplingRate = 0;
1729             mChannelMask = 0;
1730         }
1731 
1732         public int getNumChannels() {
1733             return mNumChannels;
1734         }
1735 
1736         public int getSamplingRate() {
1737             return mSamplingRate;
1738         }
1739 
1740         public int getChannelMask() {
1741             return mChannelMask;
1742         }
1743 
1744         public void setNumChannels(int numChannels) {
1745             mNumChannels = numChannels;
1746         }
1747 
1748         public void setSamplingRate(int samplingRate) {
1749             mSamplingRate = samplingRate;
1750         }
1751 
1752         public void setChannelMask(int mask) {
1753             mChannelMask = mask;
1754         }
1755 
1756         private int mNumChannels;
1757         private int mSamplingRate;
1758         private int mChannelMask;
1759     }
1760 
    /**
     * Decode the given input to memory, discarding the decoded audio parameters.
     * Convenience wrapper around the full decodeToMemory overload.
     *
     * @param codecName the decoder to use
     * @param testinput the file to decode
     * @param resetMode one of the RESET_MODE_* constants
     * @param configMode one of the CONFIG_MODE_* constants
     * @param eossample sample index at which input EOS is signalled, or -1 for none
     * @param timestamps if non-null, receives the output presentation timestamps
     * @return the decoded samples
     * @throws IOException if the input cannot be opened
     */
    private short[] decodeToMemory(String codecName, final String testinput, int resetMode,
            int configMode, int eossample, List<Long> timestamps) throws IOException {

        AudioParameter audioParams = new AudioParameter();
        return decodeToMemory(codecName, audioParams, testinput,
                resetMode, configMode, eossample, timestamps);
    }
1768 
1769     private short[] decodeToMemory(String codecName, AudioParameter audioParams,
1770             final String testinput, int resetMode, int configMode, int eossample,
1771             List<Long> timestamps) throws IOException {
1772         String localTag = TAG + "#decodeToMemory";
1773         Log.v(localTag, String.format("reset = %d; config: %s", resetMode, configMode));
1774         short [] decoded = new short[0];
1775         int decodedIdx = 0;
1776 
1777         MediaExtractor extractor;
1778         MediaCodec codec;
1779         ByteBuffer[] codecInputBuffers;
1780         ByteBuffer[] codecOutputBuffers;
1781 
1782         extractor = new MediaExtractor();
1783         extractor.setDataSource(mInpPrefix + testinput);
1784 
1785         assertEquals("wrong number of tracks", 1, extractor.getTrackCount());
1786         MediaFormat format = extractor.getTrackFormat(0);
1787         String mime = format.getString(MediaFormat.KEY_MIME);
1788         assertTrue("not an audio file", mime.startsWith("audio/"));
1789 
1790         MediaFormat configFormat = format;
1791         codec = MediaCodec.createByCodecName(codecName);
1792         if (configMode == CONFIG_MODE_QUEUE && format.containsKey(CSD_KEYS[0])) {
1793             configFormat = MediaFormat.createAudioFormat(mime,
1794                     format.getInteger(MediaFormat.KEY_SAMPLE_RATE),
1795                     format.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
1796 
1797             configFormat.setLong(MediaFormat.KEY_DURATION,
1798                     format.getLong(MediaFormat.KEY_DURATION));
1799             String[] keys = new String[] { "max-input-size", "encoder-delay", "encoder-padding" };
1800             for (String k : keys) {
1801                 if (format.containsKey(k)) {
1802                     configFormat.setInteger(k, format.getInteger(k));
1803                 }
1804             }
1805         }
1806         Log.v(localTag, "configuring with " + configFormat);
1807         codec.configure(configFormat, null /* surface */, null /* crypto */, 0 /* flags */);
1808 
1809         codec.start();
1810         codecInputBuffers = codec.getInputBuffers();
1811         codecOutputBuffers = codec.getOutputBuffers();
1812 
1813         if (resetMode == RESET_MODE_RECONFIGURE) {
1814             codec.stop();
1815             codec.configure(configFormat, null /* surface */, null /* crypto */, 0 /* flags */);
1816             codec.start();
1817             codecInputBuffers = codec.getInputBuffers();
1818             codecOutputBuffers = codec.getOutputBuffers();
1819         } else if (resetMode == RESET_MODE_FLUSH) {
1820             codec.flush();
1821         }
1822 
1823         extractor.selectTrack(0);
1824 
1825         if (configMode == CONFIG_MODE_QUEUE) {
1826             queueConfig(codec, format);
1827         }
1828 
1829         // start decoding
1830         final long kTimeOutUs = 5000;
1831         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
1832         boolean sawInputEOS = false;
1833         boolean sawOutputEOS = false;
1834         int noOutputCounter = 0;
1835         int samplecounter = 0;
1836         while (!sawOutputEOS && noOutputCounter < 50) {
1837             noOutputCounter++;
1838             if (!sawInputEOS) {
1839                 int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
1840 
1841                 if (inputBufIndex >= 0) {
1842                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
1843 
1844                     int sampleSize =
1845                         extractor.readSampleData(dstBuf, 0 /* offset */);
1846 
1847                     long presentationTimeUs = 0;
1848 
1849                     if (sampleSize < 0 && eossample > 0) {
1850                         fail("test is broken: never reached eos sample");
1851                     }
1852                     if (sampleSize < 0) {
1853                         Log.d(TAG, "saw input EOS.");
1854                         sawInputEOS = true;
1855                         sampleSize = 0;
1856                     } else {
1857                         if (samplecounter == eossample) {
1858                             sawInputEOS = true;
1859                         }
1860                         samplecounter++;
1861                         presentationTimeUs = extractor.getSampleTime();
1862                     }
1863                     codec.queueInputBuffer(
1864                             inputBufIndex,
1865                             0 /* offset */,
1866                             sampleSize,
1867                             presentationTimeUs,
1868                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
1869 
1870                     if (!sawInputEOS) {
1871                         extractor.advance();
1872                     }
1873                 }
1874             }
1875 
1876             int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
1877 
1878             if (res >= 0) {
1879                 //Log.d(TAG, "got frame, size " + info.size + "/" + info.presentationTimeUs);
1880 
1881                 if (info.size > 0) {
1882                     noOutputCounter = 0;
1883                     if (timestamps != null) {
1884                         timestamps.add(info.presentationTimeUs);
1885                     }
1886                 }
1887                 if (info.size > 0 &&
1888                         resetMode != RESET_MODE_NONE && resetMode != RESET_MODE_EOS_FLUSH) {
1889                     // once we've gotten some data out of the decoder, reset and start again
1890                     if (resetMode == RESET_MODE_RECONFIGURE) {
1891                         codec.stop();
1892                         codec.configure(configFormat, null /* surface */, null /* crypto */,
1893                                 0 /* flags */);
1894                         codec.start();
1895                         codecInputBuffers = codec.getInputBuffers();
1896                         codecOutputBuffers = codec.getOutputBuffers();
1897                         if (configMode == CONFIG_MODE_QUEUE) {
1898                             queueConfig(codec, format);
1899                         }
1900                     } else /* resetMode == RESET_MODE_FLUSH */ {
1901                         codec.flush();
1902                     }
1903                     resetMode = RESET_MODE_NONE;
1904                     extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1905                     sawInputEOS = false;
1906                     samplecounter = 0;
1907                     if (timestamps != null) {
1908                         timestamps.clear();
1909                     }
1910                     continue;
1911                 }
1912 
1913                 int outputBufIndex = res;
1914                 ByteBuffer buf = codecOutputBuffers[outputBufIndex];
1915 
1916                 if (decodedIdx + (info.size / 2) >= decoded.length) {
1917                     decoded = Arrays.copyOf(decoded, decodedIdx + (info.size / 2));
1918                 }
1919 
1920                 buf.position(info.offset);
1921                 for (int i = 0; i < info.size; i += 2) {
1922                     decoded[decodedIdx++] = buf.getShort();
1923                 }
1924 
1925                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
1926 
1927                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
1928                     Log.d(TAG, "saw output EOS.");
1929                     if (resetMode == RESET_MODE_EOS_FLUSH) {
1930                         resetMode = RESET_MODE_NONE;
1931                         codec.flush();
1932                         extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
1933                         sawInputEOS = false;
1934                         samplecounter = 0;
1935                         decoded = new short[0];
1936                         decodedIdx = 0;
1937                         if (timestamps != null) {
1938                             timestamps.clear();
1939                         }
1940                     } else {
1941                         sawOutputEOS = true;
1942                     }
1943                 }
1944             } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
1945                 codecOutputBuffers = codec.getOutputBuffers();
1946 
1947                 Log.d(TAG, "output buffers have changed.");
1948             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
1949                 MediaFormat oformat = codec.getOutputFormat();
1950                 audioParams.setNumChannels(oformat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
1951                 audioParams.setSamplingRate(oformat.getInteger(MediaFormat.KEY_SAMPLE_RATE));
1952                 Log.d(TAG, "output format has changed to " + oformat);
1953             } else {
1954                 Log.d(TAG, "dequeueOutputBuffer returned " + res);
1955             }
1956         }
1957         if (noOutputCounter >= 50) {
1958             fail("decoder stopped outputing data");
1959         }
1960 
1961         codec.stop();
1962         codec.release();
1963         return decoded;
1964     }
1965 
1966     private static void queueConfig(MediaCodec codec, MediaFormat format) {
1967         for (String csdKey : CSD_KEYS) {
1968             if (!format.containsKey(csdKey)) {
1969                 continue;
1970             }
1971             ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
1972             int inputBufIndex = codec.dequeueInputBuffer(-1);
1973             if (inputBufIndex < 0) {
1974                 fail("failed to queue configuration buffer " + csdKey);
1975             } else {
1976                 ByteBuffer csd = (ByteBuffer) format.getByteBuffer(csdKey).rewind();
1977                 Log.v(TAG + "#queueConfig", String.format("queueing %s:%s", csdKey, csd));
1978                 codecInputBuffers[inputBufIndex].put(csd);
1979                 codec.queueInputBuffer(
1980                         inputBufIndex,
1981                         0 /* offset */,
1982                         csd.limit(),
1983                         0 /* presentation time (us) */,
1984                         MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
1985             }
1986         }
1987     }
1988 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-1"})
    @Test
    public void testDecodeM4aWithEOSOnLastBuffer() throws Exception {
        // AAC/.m4a: EOS flagged on the last full input buffer must decode identically
        // to EOS on a trailing empty buffer.
        testDecodeWithEOSOnLastBuffer("sinesweepm4a.m4a");
    }
1995 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-6"})
    @Test
    public void testDecodeMp3WithEOSOnLastBuffer() throws Exception {
        // MP3: EOS on the last full input buffer must match EOS on a trailing empty
        // buffer, for two differently-encoded streams.
        testDecodeWithEOSOnLastBuffer("sinesweepmp3lame.mp3");
        testDecodeWithEOSOnLastBuffer("sinesweepmp3smpb.mp3");
    }
2003 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-10"})
    @Test
    public void testDecodeOpusWithEOSOnLastBuffer() throws Exception {
        // Opus in both mkv and mp4 containers: EOS on the last full input buffer must
        // match EOS on a trailing empty buffer.
        testDecodeWithEOSOnLastBuffer("sinesweepopus.mkv");
        testDecodeWithEOSOnLastBuffer("sinesweepopusmp4.mp4");
    }
2011 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-9"})
    @Test
    public void testDecodeWavWithEOSOnLastBuffer() throws Exception {
        // PCM/.wav: EOS on the last full input buffer must match EOS on a trailing
        // empty buffer.
        testDecodeWithEOSOnLastBuffer("sinesweepwav.wav");
    }
2018 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-5"})
    @Test
    public void testDecodeFlacWithEOSOnLastBuffer() throws Exception {
        // FLAC in mkv, raw, and mp4 containers: EOS on the last full input buffer must
        // match EOS on a trailing empty buffer.
        testDecodeWithEOSOnLastBuffer("sinesweepflacmkv.mkv");
        testDecodeWithEOSOnLastBuffer("sinesweepflac.flac");
        testDecodeWithEOSOnLastBuffer("sinesweepflacmp4.mp4");
    }
2027 
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_END_OF_STREAM"})
    @CddTest(requirements = {"5.1.2/C-1-8"})
    @Test
    public void testDecodeOggWithEOSOnLastBuffer() throws Exception {
        // Vorbis in ogg, mkv, and mp4 containers: EOS on the last full input buffer
        // must match EOS on a trailing empty buffer.
        testDecodeWithEOSOnLastBuffer("sinesweepogg.ogg");
        testDecodeWithEOSOnLastBuffer("sinesweepoggmkv.mkv");
        testDecodeWithEOSOnLastBuffer("sinesweepoggmp4.mp4");
    }
2036 
    /* setting EOS on the last full input buffer should be equivalent to setting EOS on an empty
     * input buffer after all the full ones. */
    private void testDecodeWithEOSOnLastBuffer(final String res) throws Exception {
        int numsamples = countSamples(res);
        assertTrue(numsamples != 0);

        for (String codecName: codecsFor(res)) {
            // Reference decode: eossample == -1 means EOS is signalled on an empty
            // buffer queued after the last real sample.
            List<Long> timestamps1 = new ArrayList<Long>();
            short[] decode1 = decodeToMemory(codecName, res,
                    RESET_MODE_NONE, CONFIG_MODE_NONE, -1, timestamps1);

            // Same decode, but EOS is flagged on the last full input buffer
            // (sample index numsamples - 1) instead.
            List<Long> timestamps2 = new ArrayList<Long>();
            short[] decode2 = decodeToMemory(codecName, res,
                    RESET_MODE_NONE, CONFIG_MODE_NONE, numsamples - 1,
                    timestamps2);

            // check that data and timestamps are the same for EOS-on-last and EOS-after-last
            assertEquals(decode1.length, decode2.length);
            assertTrue(Arrays.equals(decode1, decode2));
            assertEquals(timestamps1.size(), timestamps2.size());
            assertTrue(timestamps1.equals(timestamps2));

            // ... and that this is also true when reconfiguring the codec
            timestamps2.clear();
            decode2 = decodeToMemory(codecName, res,
                    RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, -1, timestamps2);
            assertTrue(Arrays.equals(decode1, decode2));
            assertTrue(timestamps1.equals(timestamps2));
            timestamps2.clear();
            decode2 = decodeToMemory(codecName, res,
                    RESET_MODE_RECONFIGURE, CONFIG_MODE_NONE, numsamples - 1, timestamps2);
            assertEquals(decode1.length, decode2.length);
            assertTrue(Arrays.equals(decode1, decode2));
            assertTrue(timestamps1.equals(timestamps2));

            // ... and that this is also true when flushing the codec
            timestamps2.clear();
            decode2 = decodeToMemory(codecName, res,
                    RESET_MODE_FLUSH, CONFIG_MODE_NONE, -1, timestamps2);
            assertTrue(Arrays.equals(decode1, decode2));
            assertTrue(timestamps1.equals(timestamps2));
            timestamps2.clear();
            decode2 = decodeToMemory(codecName, res,
                    RESET_MODE_FLUSH, CONFIG_MODE_NONE, numsamples - 1,
                    timestamps2);
            assertEquals(decode1.length, decode2.length);
            assertTrue(Arrays.equals(decode1, decode2));
            assertTrue(timestamps1.equals(timestamps2));
        }
    }
2087 
2088     private int countSamples(final String res) throws IOException {
2089         MediaExtractor extractor = new MediaExtractor();
2090         extractor.setDataSource(mInpPrefix + res);
2091         extractor.selectTrack(0);
2092         int numsamples = extractor.getSampleTime() < 0 ? 0 : 1;
2093         while (extractor.advance()) {
2094             numsamples++;
2095         }
2096         return numsamples;
2097     }
2098 
2099     private void testDecode(final String testVideo, int frameNum) throws Exception {
2100         if (!MediaUtils.checkCodecForResource(mInpPrefix + testVideo, 0 /* track */)) {
2101             return; // skip
2102         }
2103 
2104         // Decode to buffer.
2105         int frames1 = countFrames(testVideo, RESET_MODE_NONE, -1 /* eosframe */, null);
2106         assertEquals("wrong number of frames decoded", frameNum, frames1);
2107 
2108         // Decode to Surface.
2109         Surface s = getActivity().getSurfaceHolder().getSurface();
2110         int frames2 = countFrames(testVideo, RESET_MODE_NONE, -1 /* eosframe */, s);
2111         assertEquals("different number of frames when using buffer", frames1, frames2);
2112     }
2113 
    @Test
    public void testCodecBasicH264() throws Exception {
        // Basic AVC decode: expect 240 frames via both buffer and surface paths.
        testDecode("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 240);
    }
2118 
    @Test
    public void testCodecBasicHEVC() throws Exception {
        // Basic HEVC decode: expect 300 frames via both buffer and surface paths.
        testDecode(
                "bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4", 300);
    }
2124 
    @Test
    public void testCodecBasicH263() throws Exception {
        // Basic H.263 decode: expect 122 frames via both buffer and surface paths.
        testDecode("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp", 122);
    }
2129 
    @Test
    public void testCodecBasicMpeg2() throws Exception {
        // Basic MPEG-2 decode: expect 300 frames via both buffer and surface paths.
        testDecode("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
    }
2134 
    @Test
    public void testCodecBasicMpeg4() throws Exception {
        // Basic MPEG-4 decode: expect 249 frames via both buffer and surface paths.
        testDecode("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 249);
    }
2139 
    @Test
    public void testCodecBasicVP8() throws Exception {
        // Basic VP8 decode: expect 240 frames via both buffer and surface paths.
        testDecode("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 240);
    }
2144 
    @Test
    public void testCodecBasicVP9() throws Exception {
        // Basic VP9 decode: expect 240 frames via both buffer and surface paths.
        testDecode("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm", 240);
    }
2149 
    @Test
    public void testCodecBasicAV1() throws Exception {
        // Basic AV1 decode: expect 300 frames via both buffer and surface paths.
        testDecode("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
    }
2154 
    @Test
    public void testH264Decode320x240() throws Exception {
        // AVC 320x240: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_320x240_mp4_h264_mp2_800kbps_30fps_aac_lc_5ch_240kbps_44100hz.mp4", 300);
    }
2159 
    @Test
    public void testH264Decode720x480() throws Exception {
        // AVC 720x480: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_720x480_mp4_h264_mp3_2mbps_30fps_aac_lc_5ch_320kbps_48000hz.mp4", 300);
    }
2164 
2165     @CddTest(requirements = {"5.3.4/C-2-1"})
2166     @Test
2167     public void testH264Decode30fps1280x720Tv() throws Exception {
2168         if (checkTv()) {
2169             assertTrue(MediaUtils.canDecodeVideo(
2170                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 30,
2171                     AVCProfileHigh, AVCLevel31, 8000000));
2172         }
2173     }
2174 
2175     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_SecurePlayback"})
2176     @CddTest(requirements = {"5.3.4/C-2-1"})
2177     @Test
2178     public void testH264SecureDecode30fps1280x720Tv() throws Exception {
2179         if (checkTv()) {
2180             verifySecureVideoDecodeSupport(
2181                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 30,
2182                     AVCProfileHigh, AVCLevel31, 8000000);
2183         }
2184     }
2185 
    @Test
    public void testH264Decode30fps1280x720() throws Exception {
        // AVC 720p30: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s4_1280x720_mp4_h264_mp31_8mbps_30fps_aac_he_mono_40kbps_44100hz.mp4", 300);
    }
2190 
2191     @CddTest(requirements = {"5.3.4/C-2-1"})
2192     @Test
2193     public void testH264Decode60fps1280x720Tv() throws Exception {
2194         if (checkTv()) {
2195             assertTrue(MediaUtils.canDecodeVideo(
2196                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 60,
2197                     AVCProfileHigh, AVCLevel32, 8000000));
2198             testDecode(
2199                     "bbb_s3_1280x720_mp4_h264_hp32_8mbps_60fps_aac_he_v2_stereo_48kbps_48000hz.mp4",
2200                     600);
2201         }
2202     }
2203 
2204     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_SecurePlayback"})
2205     @CddTest(requirements = {"5.3.4/C-2-1"})
2206     @Test
2207     public void testH264SecureDecode60fps1280x720Tv() throws Exception {
2208         if (checkTv()) {
2209             verifySecureVideoDecodeSupport(
2210                     MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 60,
2211                     AVCProfileHigh, AVCLevel32, 8000000);
2212         }
2213     }
2214 
    @Test
    public void testH264Decode60fps1280x720() throws Exception {
        // AVC 720p60: expect 600 frames via both buffer and surface paths.
        testDecode("bbb_s3_1280x720_mp4_h264_mp32_8mbps_60fps_aac_he_v2_6ch_144kbps_44100hz.mp4",
                600);
    }
2220 
2221     @CddTest(requirements = {"5.3.4/C-2-2"})
2222     @Test
2223     public void testH264Decode30fps1920x1080Tv() throws Exception {
2224         if (checkTv()) {
2225             assertTrue(MediaUtils.canDecodeVideo(
2226                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 30,
2227                     AVCProfileHigh, AVCLevel4, 20000000));
2228             testDecode(
2229                     "bbb_s4_1920x1080_wide_mp4_h264_hp4_20mbps_30fps_aac_lc_6ch_384kbps_44100hz.mp4",
2230                     150);
2231         }
2232     }
2233 
2234     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_SecurePlayback"})
2235     @CddTest(requirements = {"5.3.4/C-2-2"})
2236     @Test
2237     public void testH264SecureDecode30fps1920x1080Tv() throws Exception {
2238         if (checkTv()) {
2239             verifySecureVideoDecodeSupport(
2240                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 30,
2241                     AVCProfileHigh, AVCLevel4, 20000000);
2242         }
2243     }
2244 
    @Test
    public void testH264Decode30fps1920x1080() throws Exception {
        // AVC 1080p30: expect 150 frames via both buffer and surface paths.
        testDecode("bbb_s4_1920x1080_wide_mp4_h264_mp4_20mbps_30fps_aac_he_5ch_200kbps_44100hz.mp4",
                150);
    }
2250 
2251     @CddTest(requirements = {"5.3.4/C-2-2"})
2252     @Test
2253     public void testH264Decode60fps1920x1080Tv() throws Exception {
2254         if (checkTv()) {
2255             assertTrue(MediaUtils.canDecodeVideo(
2256                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 60,
2257                     AVCProfileHigh, AVCLevel42, 20000000));
2258             testDecode("bbb_s2_1920x1080_mp4_h264_hp42_20mbps_60fps_aac_lc_6ch_384kbps_48000hz.mp4",
2259                     300);
2260         }
2261     }
2262 
2263     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_SecurePlayback"})
2264     @CddTest(requirements = {"5.3.4/C-2-2"})
2265     @Test
2266     public void testH264SecureDecode60fps1920x1080Tv() throws Exception {
2267         if (checkTv()) {
2268             verifySecureVideoDecodeSupport(
2269                     MediaFormat.MIMETYPE_VIDEO_AVC, 1920, 1080, 60,
2270                     AVCProfileHigh, AVCLevel42, 20000000);
2271         }
2272     }
2273 
    @Test
    public void testH264Decode60fps1920x1080() throws Exception {
        // AVC 1080p60 in both mp4 and mkv containers: expect 300 frames each.
        testDecode("bbb_s2_1920x1080_mp4_h264_mp42_20mbps_60fps_aac_he_v2_5ch_160kbps_48000hz.mp4",
                300);
        testDecode("bbb_s2_1920x1080_mkv_h264_mp42_20mbps_60fps_aac_he_v2_5ch_160kbps_48000hz.mkv",
                300);
    }
2281 
    @Test
    public void testH265Decode25fps1280x720() throws Exception {
        // HEVC 720p25 in mkv: expect 240 frames via both buffer and surface paths.
        testDecode("video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 240);
    }
2286 
    @Test
    public void testVP8Decode320x180() throws Exception {
        // VP8 320x180: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_320x180_webm_vp8_800kbps_30fps_opus_5ch_320kbps_48000hz.webm", 300);
    }
2291 
    @Test
    public void testVP8Decode640x360() throws Exception {
        // VP8 640x360: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_640x360_webm_vp8_2mbps_30fps_vorbis_5ch_320kbps_48000hz.webm", 300);
    }
2296 
2297     @CddTest(requirements = {"5.3.6/C-2-1"})
2298     @Test
2299     public void testVP8Decode30fps1280x720Tv() throws Exception {
2300         if (checkTv()) {
2301             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 30));
2302         }
2303     }
2304 
    @Test
    public void testVP8Decode30fps1280x720() throws Exception {
        // VP8 720p30: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s4_1280x720_webm_vp8_8mbps_30fps_opus_mono_64kbps_48000hz.webm", 300);
    }
2309 
2310     @CddTest(requirements = {"5.3.6/C-2-1"})
2311     @Test
2312     public void testVP8Decode60fps1280x720Tv() throws Exception {
2313         if (checkTv()) {
2314             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 60));
2315         }
2316     }
2317 
    @Test
    public void testVP8Decode60fps1280x720() throws Exception {
        // VP8 720p60: expect 600 frames via both buffer and surface paths.
        testDecode("bbb_s3_1280x720_webm_vp8_8mbps_60fps_opus_6ch_384kbps_48000hz.webm", 600);
    }
2322 
2323     @CddTest(requirements = {"5.3.6/C-2-2"})
2324     @Test
2325     public void testVP8Decode30fps1920x1080Tv() throws Exception {
2326         if (checkTv()) {
2327             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1920, 1080, 30));
2328         }
2329     }
2330 
    @Test
    public void testVP8Decode30fps1920x1080() throws Exception {
        // VP8 1080p30: expect 150 frames via both buffer and surface paths.
        testDecode("bbb_s4_1920x1080_wide_webm_vp8_20mbps_30fps_vorbis_6ch_384kbps_44100hz.webm",
                150);
    }
2336 
2337     @CddTest(requirements = {"5.3.6/C-2-2"})
2338     @Test
2339     public void testVP8Decode60fps1920x1080Tv() throws Exception {
2340         if (checkTv()) {
2341             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP8, 1920, 1080, 60));
2342         }
2343     }
2344 
    @Test
    public void testVP8Decode60fps1920x1080() throws Exception {
        // VP8 1080p60: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s2_1920x1080_webm_vp8_20mbps_60fps_vorbis_6ch_384kbps_48000hz.webm", 300);
    }
2349 
    @Test
    public void testVP9Decode320x180() throws Exception {
        // VP9 320x180: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_320x180_webm_vp9_0p11_600kbps_30fps_vorbis_mono_64kbps_48000hz.webm",
                300);
    }
2355 
    @Test
    public void testVP9Decode640x360() throws Exception {
        // VP9 640x360: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                300);
    }
2361 
2362     @CddTest(requirements = {"5.3.7/C-2-1"})
2363     @Test
2364     public void testVP9Decode30fps1280x720Tv() throws Exception {
2365         if (checkTv()) {
2366             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_VP9, 1280, 720, 30));
2367         }
2368     }
2369 
    @Test
    public void testVP9Decode30fps1280x720() throws Exception {
        // VP9 720p30: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s4_1280x720_webm_vp9_0p31_4mbps_30fps_opus_stereo_128kbps_48000hz.webm",
                300);
    }
2375 
    @Test
    public void testVP9Decode60fps1920x1080() throws Exception {
        // VP9 1080p60: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s2_1920x1080_webm_vp9_0p41_10mbps_60fps_vorbis_6ch_384kbps_22050hz.webm",
                300);
    }
2381 
    @Test
    public void testVP9Decode30fps3840x2160() throws Exception {
        // VP9 4K30: expect 150 frames via both buffer and surface paths.
        testDecode("bbb_s4_3840x2160_webm_vp9_0p5_20mbps_30fps_vorbis_6ch_384kbps_24000hz.webm",
                150);
    }
2387 
    @Test
    public void testVP9Decode60fps3840x2160() throws Exception {
        // VP9 4K60: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s2_3840x2160_webm_vp9_0p51_20mbps_60fps_vorbis_6ch_384kbps_32000hz.webm",
                300);
    }
2393 
    @Test
    public void testAV1Decode320x180() throws Exception {
        // AV1 320x180: expect 300 frames via both buffer and surface paths.
        testDecode("video_320x180_webm_av1_200kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
    }
2398 
    @Test
    public void testAV1Decode640x360() throws Exception {
        // AV1 640x360: expect 300 frames via both buffer and surface paths.
        testDecode("video_640x360_webm_av1_470kbps_30fps_vorbis_stereo_128kbps_48000hz.webm", 300);
    }
2403 
    @Test
    public void testAV1Decode30fps1280x720() throws Exception {
        // AV1 720p30: expect 300 frames via both buffer and surface paths.
        testDecode("video_1280x720_webm_av1_2000kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                300);
    }
2409 
    @Test
    public void testAV1Decode60fps1920x1080() throws Exception {
        // AV1 1080p60: expect 300 frames via both buffer and surface paths.
        testDecode("video_1920x1080_webm_av1_7000kbps_60fps_vorbis_stereo_128kbps_48000hz.webm",
                300);
    }
2415 
    @Test
    public void testAV1Decode30fps3840x2160() throws Exception {
        // AV1 4K30: expect 150 frames via both buffer and surface paths.
        testDecode("video_3840x2160_webm_av1_11000kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                150);
    }
2421 
    @Test
    public void testAV1Decode60fps3840x2160() throws Exception {
        // AV1 4K60: expect 300 frames via both buffer and surface paths.
        testDecode("video_3840x2160_webm_av1_18000kbps_60fps_vorbis_stereo_128kbps_48000hz.webm",
                300);
    }
2427 
    @Test
    public void testHEVCDecode352x288() throws Exception {
        // HEVC 352x288: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_352x288_mp4_hevc_mp2_600kbps_30fps_aac_he_stereo_96kbps_48000hz.mp4",
                300);
    }
2433 
    @Test
    public void testHEVCDecode720x480() throws Exception {
        // HEVC 720x480: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
                300);
    }
2439 
2440     @CddTest(requirements = {"5.3.5/C-1-2"})
2441     @Test
2442     public void testHEVCDecode30fps1280x720Tv() throws Exception {
2443         if (checkTv()) {
2444             assertTrue(MediaUtils.canDecodeVideo(
2445                     MediaFormat.MIMETYPE_VIDEO_HEVC, 1280, 720, 30,
2446                     HEVCProfileMain, HEVCMainTierLevel31, 4000000));
2447         }
2448     }
2449 
    @Test
    public void testHEVCDecode30fps1280x720() throws Exception {
        // HEVC 720p30: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s4_1280x720_mp4_hevc_mp31_4mbps_30fps_aac_he_stereo_80kbps_32000hz.mp4",
                300);
    }
2455 
2456     @CddTest(requirements = {"5.3.5/C-1-2"})
2457     @Test
2458     public void testHEVCDecode30fps1920x1080Tv() throws Exception {
2459         if (checkTv()) {
2460             assertTrue(MediaUtils.canDecodeVideo(
2461                     MediaFormat.MIMETYPE_VIDEO_HEVC, 1920, 1080, 30,
2462                     HEVCProfileMain, HEVCMainTierLevel41, 5000000));
2463         }
2464     }
2465 
    @Test
    public void testHEVCDecode60fps1920x1080() throws Exception {
        // HEVC 1080p60: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s2_1920x1080_mp4_hevc_mp41_10mbps_60fps_aac_lc_6ch_384kbps_22050hz.mp4",
                300);
    }
2471 
    @Test
    public void testHEVCDecode30fps3840x2160() throws Exception {
        // HEVC 4K30: expect 150 frames via both buffer and surface paths.
        testDecode("bbb_s4_3840x2160_mp4_hevc_mp5_20mbps_30fps_aac_lc_6ch_384kbps_24000hz.mp4",
                150);
    }
2477 
    @Test
    public void testHEVCDecode60fps3840x2160() throws Exception {
        // HEVC 4K60: expect 300 frames via both buffer and surface paths.
        testDecode("bbb_s2_3840x2160_mp4_hevc_mp51_20mbps_60fps_aac_lc_6ch_384kbps_32000hz.mp4",
                300);
    }
2483 
    @Test
    public void testMpeg2Decode352x288() throws Exception {
        // MPEG-2 352x288: expect 300 frames via both buffer and surface paths.
        testDecode("video_352x288_mp4_mpeg2_1000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
    }
2488 
    @Test
    public void testMpeg2Decode720x480() throws Exception {
        // MPEG-2 720x480: expect 300 frames via both buffer and surface paths.
        testDecode("video_720x480_mp4_mpeg2_2000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 300);
    }
2493 
2494     @CddTest(requirements = {"5.3.1/T-1-1"})
2495     @Test
2496     public void testMpeg2Decode30fps1280x720Tv() throws Exception {
2497         if (checkTv()) {
2498             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_MPEG2, 1280, 720, 30));
2499         }
2500     }
2501 
    @Test
    public void testMpeg2Decode30fps1280x720() throws Exception {
        // MPEG-2 720p30: expect 150 frames via both buffer and surface paths.
        testDecode("video_1280x720_mp4_mpeg2_6000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
    }
2506 
2507     @CddTest(requirements = {"5.3.1/T-1-1"})
2508     @Test
2509     public void testMpeg2Decode30fps1920x1080Tv() throws Exception {
2510         if (checkTv()) {
2511             assertTrue(MediaUtils.canDecodeVideo(MediaFormat.MIMETYPE_VIDEO_MPEG2, 1920, 1080, 30));
2512         }
2513     }
2514 
    @Test
    public void testMpeg2Decode30fps1920x1080() throws Exception {
        // MPEG-2 1080p30: expect 150 frames via both buffer and surface paths.
        testDecode("video_1920x1080_mp4_mpeg2_12000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
    }
2519 
    @Test
    public void testMpeg2Decode30fps3840x2160() throws Exception {
        // MPEG-2 4K30: expect 150 frames via both buffer and surface paths.
        testDecode("video_3840x2160_mp4_mpeg2_20000kbps_30fps_aac_stereo_128kbps_48000hz.mp4", 150);
    }
2524 
2525     private void testCodecEarlyEOS(final String res, int eosFrame) throws Exception {
2526         if (!MediaUtils.checkCodecForResource(mInpPrefix + res, 0 /* track */)) {
2527             return; // skip
2528         }
2529         Surface s = getActivity().getSurfaceHolder().getSurface();
2530         int frames1 = countFrames(res, RESET_MODE_NONE, eosFrame, s);
2531         assertEquals("wrong number of frames decoded", eosFrame, frames1);
2532     }
2533 
    @Test
    public void testCodecEarlyEOSH263() throws Exception {
        // H.263: force EOS at frame 64 and expect exactly 64 decoded frames.
        testCodecEarlyEOS("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",
                64 /* eosframe */);
    }
2539 
    @Test
    public void testCodecEarlyEOSH264() throws Exception {
        // AVC: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                120 /* eosframe */);
    }
2545 
    @Test
    public void testCodecEarlyEOSHEVC() throws Exception {
        // HEVC: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS("video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
                120 /* eosframe */);
    }
2551 
2552     @Test
2553     public void testCodecEarlyEOSMpeg2() throws Exception {
2554         testCodecEarlyEOS("vdeo_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2555                 120 /* eosframe */);
2556     }
2557 
    @Test
    public void testCodecEarlyEOSMpeg4() throws Exception {
        // MPEG-4: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                120 /* eosframe */);
    }
2563 
    @Test
    public void testCodecEarlyEOSVP8() throws Exception {
        // VP8: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
                120 /* eosframe */);
    }
2569 
    @Test
    public void testCodecEarlyEOSVP9() throws Exception {
        // VP9: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS(
                "video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
                120 /* eosframe */);
    }
2576 
    @Test
    public void testCodecEarlyEOSAV1() throws Exception {
        // AV1: force EOS at frame 120 and expect exactly 120 decoded frames.
        testCodecEarlyEOS("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
                120 /* eosframe */);
    }
2582 
    @Test
    public void testCodecResetsH264WithoutSurface() throws Exception {
        // AVC reset behavior with ByteBuffer output (no surface).
        testCodecResets("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
                null);
    }
2588 
2589     @Test
2590     public void testCodecResetsH264WithSurface() throws Exception {
2591         Surface s = getActivity().getSurfaceHolder().getSurface();
2592         testCodecResets("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2593     }
2594 
    @Test
    public void testCodecResetsHEVCWithoutSurface() throws Exception {
        // HEVC reset behavior with ByteBuffer output (no surface).
        testCodecResets("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
                null);
    }
2600 
2601     @Test
2602     public void testCodecResetsHEVCWithSurface() throws Exception {
2603         Surface s = getActivity().getSurfaceHolder().getSurface();
2604         testCodecResets("bbb_s1_720x480_mp4_hevc_mp3_1600kbps_30fps_aac_he_6ch_240kbps_48000hz.mp4",
2605                 s);
2606     }
2607 
2608     @Test
2609     public void testCodecResetsMpeg2WithoutSurface() throws Exception {
2610         testCodecResets("video_1280x720_mp4_mpeg2_6000kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
2611                 null);
2612     }
2613 
2614     @Test
2615     public void testCodecResetsMpeg2WithSurface() throws Exception {
2616         Surface s = getActivity().getSurfaceHolder().getSurface();
2617         testCodecResets("video_176x144_mp4_mpeg2_105kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2618     }
2619 
2620     @Test
2621     public void testCodecResetsH263WithoutSurface() throws Exception {
2622         testCodecResets("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",null);
2623     }
2624 
2625     @Test
2626     public void testCodecResetsH263WithSurface() throws Exception {
2627         Surface s = getActivity().getSurfaceHolder().getSurface();
2628         testCodecResets("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp", s);
2629     }
2630 
2631     @Test
2632     public void testCodecResetsMpeg4WithoutSurface() throws Exception {
2633         testCodecResets("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
2634                 null);
2635     }
2636 
2637     @Test
2638     public void testCodecResetsMpeg4WithSurface() throws Exception {
2639         Surface s = getActivity().getSurfaceHolder().getSurface();
2640         testCodecResets("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4", s);
2641     }
2642 
2643     @Test
2644     public void testCodecResetsVP8WithoutSurface() throws Exception {
2645         testCodecResets("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2646                 null);
2647     }
2648 
2649     @Test
2650     public void testCodecResetsVP8WithSurface() throws Exception {
2651         Surface s = getActivity().getSurfaceHolder().getSurface();
2652         testCodecResets("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2653                 s);
2654     }
2655 
2656     @Test
2657     public void testCodecResetsVP9WithoutSurface() throws Exception {
2658         testCodecResets("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2659                 null);
2660     }
2661 
2662     @Test
2663     public void testCodecResetsAV1WithoutSurface() throws Exception {
2664         testCodecResets("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2665                 null);
2666     }
2667 
2668     @Test
2669     public void testCodecResetsVP9WithSurface() throws Exception {
2670         Surface s = getActivity().getSurfaceHolder().getSurface();
2671         testCodecResets("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
2672                 s);
2673     }
2674 
2675     @Test
2676     public void testCodecResetsAV1WithSurface() throws Exception {
2677         Surface s = getActivity().getSurfaceHolder().getSurface();
2678         testCodecResets("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
2679                 s);
2680     }
2681 
2682 //    public void testCodecResetsOgg() throws Exception {
2683 //        testCodecResets("sinesweepogg.ogg", null);
2684 //    }
2685 
2686     @Test
2687     public void testCodecResetsMp3() throws Exception {
2688         testCodecReconfig("sinesweepmp3lame.mp3");
2689         // NOTE: replacing testCodecReconfig call soon
2690 //        testCodecResets("sinesweepmp3lame.mp3, null);
2691     }
2692 
2693     @Test
2694     public void testCodecResetsM4a() throws Exception {
2695         testCodecReconfig("sinesweepm4a.m4a");
2696         // NOTE: replacing testCodecReconfig call soon
2697 //        testCodecResets("sinesweepm4a.m4a", null);
2698     }
2699 
2700     private void testCodecReconfig(final String audio) throws Exception {
2701         int size1 = countSize(audio, RESET_MODE_NONE, -1 /* eosframe */);
2702         int size2 = countSize(audio, RESET_MODE_RECONFIGURE, -1 /* eosframe */);
2703         assertEquals("different output size when using reconfigured codec", size1, size2);
2704     }
2705 
2706     private void testCodecResets(final String video, Surface s) throws Exception {
2707         if (!MediaUtils.checkCodecForResource(mInpPrefix + video, 0 /* track */)) {
2708             return; // skip
2709         }
2710 
2711         int frames1 = countFrames(video, RESET_MODE_NONE, -1 /* eosframe */, s);
2712         int frames2 = countFrames(video, RESET_MODE_RECONFIGURE, -1 /* eosframe */, s);
2713         int frames3 = countFrames(video, RESET_MODE_FLUSH, -1 /* eosframe */, s);
2714         assertEquals("different number of frames when using reconfigured codec", frames1, frames2);
2715         assertEquals("different number of frames when using flushed codec", frames1, frames3);
2716     }
2717 
2718     private static void verifySecureVideoDecodeSupport(
2719             String mime, int width, int height, float rate, int profile, int level, int bitrate) {
2720         MediaFormat baseFormat = new MediaFormat();
2721         baseFormat.setString(MediaFormat.KEY_MIME, mime);
2722         baseFormat.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, true);
2723 
2724         MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
2725         format.setFeatureEnabled(CodecCapabilities.FEATURE_SecurePlayback, true);
2726         format.setFloat(MediaFormat.KEY_FRAME_RATE, rate);
2727         format.setInteger(MediaFormat.KEY_PROFILE, profile);
2728         format.setInteger(MediaFormat.KEY_LEVEL, level);
2729         format.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
2730 
2731         MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
2732         if (mcl.findDecoderForFormat(baseFormat) == null) {
2733             MediaUtils.skipTest("no secure decoder for " + mime);
2734             return;
2735         }
2736         assertNotNull("no decoder for " + format, mcl.findDecoderForFormat(format));
2737     }
2738 
    // Thin wrapper: lets MediaUtils pick a decoder for the given format.
    private static MediaCodec createDecoder(MediaFormat format) {
        return MediaUtils.getDecoder(format);
    }
2742 
2743     // for video
2744     private int countFrames(final String video, int resetMode, int eosframe, Surface s)
2745             throws Exception {
2746         MediaExtractor extractor = new MediaExtractor();
2747         extractor.setDataSource(mInpPrefix + video);
2748         extractor.selectTrack(0);
2749 
2750         int numframes = decodeWithChecks(null /* decoderName */, extractor,
2751                 CHECKFLAG_RETURN_OUTPUTFRAMES | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2752                 resetMode, s, eosframe, null, null);
2753 
2754         extractor.release();
2755         return numframes;
2756     }
2757 
2758     // for audio
2759     private int countSize(final String audio, int resetMode, int eosframe)
2760             throws Exception {
2761         MediaExtractor extractor = new MediaExtractor();
2762         extractor.setDataSource(mInpPrefix + audio);
2763 
2764         extractor.selectTrack(0);
2765 
2766         // fails CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH
2767         int outputSize = decodeWithChecks(null /* decoderName */, extractor,
2768                 CHECKFLAG_RETURN_OUTPUTSIZE, resetMode, null,
2769                 eosframe, null, null);
2770 
2771         extractor.release();
2772         return outputSize;
2773     }
2774 
2775     /*
2776     * Test all decoders' EOS behavior.
2777     */
2778     private void testEOSBehavior(final String movie, int stopatsample) throws Exception {
2779         testEOSBehavior(movie, new int[] {stopatsample});
2780     }
2781 
2782     /*
2783     * Test all decoders' EOS behavior.
2784     */
2785     private void testEOSBehavior(final String movie, int[] stopAtSample) throws Exception {
2786         Surface s = null;
2787         MediaExtractor extractor = new MediaExtractor();
2788         extractor.setDataSource(mInpPrefix + movie);
2789         extractor.selectTrack(0); // consider variable looping on track
2790         MediaFormat format = extractor.getTrackFormat(0);
2791 
2792         String[] decoderNames = MediaUtils.getDecoderNames(format);
2793         for (String decoderName: decoderNames) {
2794             List<Long> outputChecksums = new ArrayList<Long>();
2795             List<Long> outputTimestamps = new ArrayList<Long>();
2796             Arrays.sort(stopAtSample);
2797             int last = stopAtSample.length - 1;
2798 
2799             // decode reference (longest sequence to stop at + 100) and
2800             // store checksums/pts in outputChecksums and outputTimestamps
2801             // (will fail CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH)
2802             decodeWithChecks(decoderName, extractor,
2803                     CHECKFLAG_SETCHECKSUM | CHECKFLAG_SETPTS | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2804                     RESET_MODE_NONE, s,
2805                     stopAtSample[last] + 100, outputChecksums, outputTimestamps);
2806 
2807             // decode stopAtSample requests in reverse order (longest to
2808             // shortest) and compare to reference checksums/pts in
2809             // outputChecksums and outputTimestamps
2810             for (int i = last; i >= 0; --i) {
2811                 if (true) { // reposition extractor
2812                     extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2813                 } else { // create new extractor
2814                     extractor.release();
2815                     extractor = new MediaExtractor();
2816                     extractor.setDataSource(mInpPrefix + movie);
2817                     extractor.selectTrack(0); // consider variable looping on track
2818                 }
2819                 decodeWithChecks(decoderName, extractor,
2820                         CHECKFLAG_COMPARECHECKSUM | CHECKFLAG_COMPAREPTS
2821                         | CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH
2822                         | CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH,
2823                         RESET_MODE_NONE, s,
2824                         stopAtSample[i], outputChecksums, outputTimestamps);
2825             }
2826             extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2827         }
2828 
2829         extractor.release();
2830     }
2831 
    // Bit flags for decodeWithChecks(): what to record, what to verify, and what to return.
    private static final int CHECKFLAG_SETCHECKSUM = 1 << 0;     // record per-frame checksums into outputChecksums
    private static final int CHECKFLAG_COMPARECHECKSUM = 1 << 1; // compare frames against reference outputChecksums
    private static final int CHECKFLAG_SETPTS = 1 << 2;          // record output pts into outputTimestamps
    private static final int CHECKFLAG_COMPAREPTS = 1 << 3;      // compare output pts against reference outputTimestamps
    private static final int CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH = 1 << 4; // require #input samples == #output frames
    private static final int CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH = 1 << 5;    // require each output pts to match a queued input pts
    private static final int CHECKFLAG_RETURN_OUTPUTFRAMES = 1 << 6; // return the output frame count
    private static final int CHECKFLAG_RETURN_OUTPUTSIZE = 1 << 7;   // return the total output byte count
2840 
2841     /**
2842      * Decodes frames with parameterized checks and return values.
2843      * If decoderName is provided, mediacodec will create that decoder. Otherwise,
2844      * mediacodec will use the default decoder provided by platform.
2845      * The integer return can be selected through the checkFlags variable.
2846      */
2847     private static int decodeWithChecks(
2848             String decoderName, MediaExtractor extractor,
2849             int checkFlags, int resetMode, Surface surface, int stopAtSample,
2850             List<Long> outputChecksums, List<Long> outputTimestamps)
2851             throws Exception {
2852         int trackIndex = extractor.getSampleTrackIndex();
2853         MediaFormat format = extractor.getTrackFormat(trackIndex);
2854         String mime = format.getString(MediaFormat.KEY_MIME);
2855         boolean isAudio = mime.startsWith("audio/");
2856         ByteBuffer[] codecInputBuffers;
2857         ByteBuffer[] codecOutputBuffers;
2858 
2859         MediaCodec codec =
2860                 decoderName == null ? createDecoder(format) : MediaCodec.createByCodecName(decoderName);
2861         Log.i("@@@@", "using codec: " + codec.getName());
2862         codec.configure(format, surface, null /* crypto */, 0 /* flags */);
2863         codec.start();
2864         codecInputBuffers = codec.getInputBuffers();
2865         codecOutputBuffers = codec.getOutputBuffers();
2866 
2867         if (resetMode == RESET_MODE_RECONFIGURE) {
2868             codec.stop();
2869             codec.configure(format, surface, null /* crypto */, 0 /* flags */);
2870             codec.start();
2871             codecInputBuffers = codec.getInputBuffers();
2872             codecOutputBuffers = codec.getOutputBuffers();
2873         } else if (resetMode == RESET_MODE_FLUSH) {
2874             codec.flush();
2875 
2876             // We must always queue CSD after a flush that is potentially
2877             // before we receive output format has changed.
2878             queueConfig(codec, format);
2879         }
2880 
2881         // start decode loop
2882         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
2883 
2884         MediaFormat outFormat = codec.getOutputFormat();
2885         long kTimeOutUs = 5000; // 5ms timeout
2886         String outMime = format.getString(MediaFormat.KEY_MIME);
2887         if ((surface == null) && (outMime != null) && outMime.startsWith("video/")) {
2888             int outWidth = outFormat.getInteger(MediaFormat.KEY_WIDTH);
2889             int outHeight = outFormat.getInteger(MediaFormat.KEY_HEIGHT);
2890             // in the 4K decoding case in byte buffer mode, set kTimeOutUs to 10ms as decode may
2891             // involve a memcpy
2892             if (outWidth * outHeight >= 8000000) {
2893                 kTimeOutUs = 10000;
2894             }
2895         }
2896 
2897         boolean sawInputEOS = false;
2898         boolean sawOutputEOS = false;
2899         int deadDecoderCounter = 0;
2900         int samplenum = 0;
2901         int numframes = 0;
2902         int outputSize = 0;
2903         int width = 0;
2904         int height = 0;
2905         boolean dochecksum = false;
2906         ArrayList<Long> timestamps = new ArrayList<Long>();
2907         if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
2908             outputTimestamps.clear();
2909         }
2910         if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
2911             outputChecksums.clear();
2912         }
2913         boolean advanceDone = true;
2914         while (!sawOutputEOS && deadDecoderCounter < 100) {
2915             // handle input
2916             if (!sawInputEOS) {
2917                 int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
2918 
2919                 if (inputBufIndex >= 0) {
2920                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
2921 
2922                     int sampleSize =
2923                             extractor.readSampleData(dstBuf, 0 /* offset */);
2924                     assertEquals("end of stream should match extractor.advance()", sampleSize >= 0,
2925                             advanceDone);
2926                     long presentationTimeUs = extractor.getSampleTime();
2927                     advanceDone = extractor.advance();
2928                     // int flags = extractor.getSampleFlags();
2929                     // Log.i("@@@@", "read sample " + samplenum + ":" +
2930                     // extractor.getSampleFlags()
2931                     // + " @ " + extractor.getSampleTime() + " size " +
2932                     // sampleSize);
2933 
2934                     if (sampleSize < 0) {
2935                         assertFalse("advance succeeded after failed read", advanceDone);
2936                         Log.d(TAG, "saw input EOS.");
2937                         sawInputEOS = true;
2938                         assertEquals("extractor.readSampleData() must return -1 at end of stream",
2939                                 -1, sampleSize);
2940                         assertEquals("extractor.getSampleTime() must return -1 at end of stream",
2941                                 -1, presentationTimeUs);
2942                         sampleSize = 0; // required otherwise queueInputBuffer
2943                                         // returns invalid.
2944                     } else {
2945                         timestamps.add(presentationTimeUs);
2946                         samplenum++; // increment before comparing with stopAtSample
2947                         if (samplenum == stopAtSample) {
2948                             Log.d(TAG, "saw input EOS (stop at sample).");
2949                             sawInputEOS = true; // tag this sample as EOS
2950                         }
2951                     }
2952                     codec.queueInputBuffer(
2953                             inputBufIndex,
2954                             0 /* offset */,
2955                             sampleSize,
2956                             presentationTimeUs,
2957                             sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
2958                 } else {
2959                     assertEquals(
2960                             "codec.dequeueInputBuffer() unrecognized return value: " + inputBufIndex,
2961                             MediaCodec.INFO_TRY_AGAIN_LATER, inputBufIndex);
2962                 }
2963             }
2964 
2965             // handle output
2966             int outputBufIndex = codec.dequeueOutputBuffer(info, kTimeOutUs);
2967 
2968             deadDecoderCounter++;
2969             if (outputBufIndex >= 0) {
2970                 if (info.size > 0) { // Disregard 0-sized buffers at the end.
2971                     deadDecoderCounter = 0;
2972                     if (resetMode != RESET_MODE_NONE) {
2973                         // once we've gotten some data out of the decoder, reset
2974                         // and start again
2975                         if (resetMode == RESET_MODE_RECONFIGURE) {
2976                             codec.stop();
2977                             codec.configure(format, surface /* surface */, null /* crypto */,
2978                                     0 /* flags */);
2979                             codec.start();
2980                             codecInputBuffers = codec.getInputBuffers();
2981                             codecOutputBuffers = codec.getOutputBuffers();
2982                         } else if (resetMode == RESET_MODE_FLUSH) {
2983                             codec.flush();
2984                         } else {
2985                             fail("unknown resetMode: " + resetMode);
2986                         }
2987                         // restart at beginning, clear resetMode
2988                         resetMode = RESET_MODE_NONE;
2989                         extractor.seekTo(0, MediaExtractor.SEEK_TO_NEXT_SYNC);
2990                         sawInputEOS = false;
2991                         numframes = 0;
2992                         timestamps.clear();
2993                         if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
2994                             outputTimestamps.clear();
2995                         }
2996                         if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
2997                             outputChecksums.clear();
2998                         }
2999                         continue;
3000                     }
3001                     if ((checkFlags & CHECKFLAG_COMPAREPTS) != 0) {
3002                         assertTrue("number of frames (" + numframes
3003                                 + ") exceeds number of reference timestamps",
3004                                 numframes < outputTimestamps.size());
3005                         assertEquals("frame ts mismatch at frame " + numframes,
3006                                 (long) outputTimestamps.get(numframes), info.presentationTimeUs);
3007                     } else if ((checkFlags & CHECKFLAG_SETPTS) != 0) {
3008                         outputTimestamps.add(info.presentationTimeUs);
3009                     }
3010                     if ((checkFlags & (CHECKFLAG_SETCHECKSUM | CHECKFLAG_COMPARECHECKSUM)) != 0) {
3011                         long sum = 0;   // note: checksum is 0 if buffer format unrecognized
3012                         if (dochecksum) {
3013                             Image image = codec.getOutputImage(outputBufIndex);
3014                             // use image to do crc if it's available
3015                             // fall back to buffer if image is not available
3016                             if (image != null) {
3017                                 sum = checksum(image);
3018                             } else {
3019                                 // TODO: add stride - right now just use info.size (as before)
3020                                 //sum = checksum(codecOutputBuffers[outputBufIndex], width, height,
3021                                 //        stride);
3022                                 ByteBuffer outputBuffer = codec.getOutputBuffer(outputBufIndex);
3023                                 outputBuffer.position(info.offset);
3024                                 sum = checksum(outputBuffer, info.size);
3025                             }
3026                         }
3027                         if ((checkFlags & CHECKFLAG_COMPARECHECKSUM) != 0) {
3028                             assertTrue("number of frames (" + numframes
3029                                     + ") exceeds number of reference checksums",
3030                                     numframes < outputChecksums.size());
3031                             Log.d(TAG, "orig checksum: " + outputChecksums.get(numframes)
3032                                     + " new checksum: " + sum);
3033                             assertEquals("frame data mismatch at frame " + numframes,
3034                                     (long) outputChecksums.get(numframes), sum);
3035                         } else if ((checkFlags & CHECKFLAG_SETCHECKSUM) != 0) {
3036                             outputChecksums.add(sum);
3037                         }
3038                     }
3039                     if ((checkFlags & CHECKFLAG_COMPAREINPUTOUTPUTPTSMATCH) != 0) {
3040                         assertTrue("output timestamp " + info.presentationTimeUs
3041                                 + " without corresponding input timestamp"
3042                                 , timestamps.remove(info.presentationTimeUs));
3043                     }
3044                     outputSize += info.size;
3045                     numframes++;
3046                 }
3047                 // Log.d(TAG, "got frame, size " + info.size + "/" +
3048                 // info.presentationTimeUs +
3049                 // "/" + numframes + "/" + info.flags);
3050                 codec.releaseOutputBuffer(outputBufIndex, true /* render */);
3051                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
3052                     Log.d(TAG, "saw output EOS.");
3053                     sawOutputEOS = true;
3054                 }
3055             } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
3056                 codecOutputBuffers = codec.getOutputBuffers();
3057                 Log.d(TAG, "output buffers have changed.");
3058             } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
3059                 MediaFormat oformat = codec.getOutputFormat();
3060                 if (oformat.containsKey(MediaFormat.KEY_COLOR_FORMAT) &&
3061                         oformat.containsKey(MediaFormat.KEY_WIDTH) &&
3062                         oformat.containsKey(MediaFormat.KEY_HEIGHT)) {
3063                     int colorFormat = oformat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
3064                     width = oformat.getInteger(MediaFormat.KEY_WIDTH);
3065                     height = oformat.getInteger(MediaFormat.KEY_HEIGHT);
3066                     dochecksum = isRecognizedFormat(colorFormat); // only checksum known raw
3067                                                                   // buf formats
3068                     Log.d(TAG, "checksum fmt: " + colorFormat + " dim " + width + "x" + height);
3069                 } else {
3070                     dochecksum = false; // check with audio later
3071                     width = height = 0;
3072                     Log.d(TAG, "output format has changed to (unknown video) " + oformat);
3073                 }
3074             } else {
3075                 assertEquals(
3076                         "codec.dequeueOutputBuffer() unrecognized return index: "
3077                                 + outputBufIndex,
3078                         MediaCodec.INFO_TRY_AGAIN_LATER, outputBufIndex);
3079             }
3080         }
3081         codec.stop();
3082         codec.release();
3083 
3084         assertTrue("last frame didn't have EOS", sawOutputEOS);
3085         if ((checkFlags & CHECKFLAG_COMPAREINPUTOUTPUTSAMPLEMATCH) != 0) {
3086             assertEquals("I!=O", samplenum, numframes);
3087             if (stopAtSample != 0) {
3088                 assertEquals("did not stop with right number of frames", stopAtSample, numframes);
3089             }
3090         }
3091         return (checkFlags & CHECKFLAG_RETURN_OUTPUTSIZE) != 0 ? outputSize :
3092                 (checkFlags & CHECKFLAG_RETURN_OUTPUTFRAMES) != 0 ? numframes :
3093                         0;
3094     }
3095 
3096     @Test
3097     public void testEOSBehaviorH264() throws Exception {
3098         // this video has an I frame at 44
3099         testEOSBehavior("video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
3100                 new int[]{1, 44, 45, 55});
3101     }
3102 
3103     @Test
3104     public void testEOSBehaviorHEVC() throws Exception {
3105         testEOSBehavior("video_480x360_mp4_hevc_650kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3106                 new int[]{1, 17, 23, 49});
3107     }
3108 
3109     @Test
3110     public void testEOSBehaviorMpeg2() throws Exception {
3111         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3112                 17);
3113         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3114                 23);
3115         testEOSBehavior("video_480x360_mp4_mpeg2_1500kbps_30fps_aac_stereo_128kbps_48000hz.mp4",
3116                 49);
3117     }
3118 
3119     @Test
3120     public void testEOSBehaviorH263() throws Exception {
3121         // this video has an I frame every 12 frames.
3122         testEOSBehavior("video_176x144_3gp_h263_300kbps_12fps_aac_stereo_128kbps_22050hz.3gp",
3123                 new int[]{1, 24, 25, 48, 50});
3124     }
3125 
3126     @Test
3127     public void testEOSBehaviorMpeg4() throws Exception {
3128         // this video has an I frame every 12 frames
3129         testEOSBehavior("video_480x360_mp4_mpeg4_860kbps_25fps_aac_stereo_128kbps_44100hz.mp4",
3130                 new int[]{1, 24, 25, 48, 50, 2});
3131     }
3132 
3133     @Test
3134     public void testEOSBehaviorVP8() throws Exception {
3135         // this video has an I frame at 46
3136         testEOSBehavior("video_480x360_webm_vp8_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
3137                 new int[]{1, 46, 47, 57, 45});
3138     }
3139 
3140     @Test
3141     public void testEOSBehaviorVP9() throws Exception {
3142         // this video has an I frame at 44
3143         testEOSBehavior("video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm",
3144                 new int[]{1, 44, 45, 55, 43});
3145     }
3146 
3147     @Test
3148     public void testEOSBehaviorAV1() throws Exception {
3149         // this video has an I frame at 44
3150         testEOSBehavior("video_480x360_webm_av1_400kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3151                 new int[]{1, 44, 45, 55, 43});
3152     }
3153 
3154     /* from EncodeDecodeTest */
3155     private static boolean isRecognizedFormat(int colorFormat) {
3156         // Log.d(TAG, "color format: " + String.format("0x%08x", colorFormat));
3157         switch (colorFormat) {
3158         // these are the formats we know how to handle for this test
3159             case CodecCapabilities.COLOR_FormatYUV420Planar:
3160             case CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
3161             case CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
3162             case CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
3163             case CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
3164             case CodecCapabilities.COLOR_QCOM_FormatYUV420SemiPlanar:
3165                 /*
3166                  * TODO: Check newer formats or ignore.
3167                  * OMX_SEC_COLOR_FormatNV12Tiled = 0x7FC00002
3168                  * OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka = 0x7FA30C03: N4/N7_2
3169                  * OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar32m = 0x7FA30C04: N5
3170                  */
3171                 return true;
3172             default:
3173                 return false;
3174         }
3175     }
3176 
3177     private static long checksum(ByteBuffer buf, int size) {
3178         int cap = buf.capacity();
3179         assertTrue("checksum() params are invalid: size = " + size + " cap = " + cap,
3180                 size > 0 && size <= cap);
3181         CRC32 crc = new CRC32();
3182         if (buf.hasArray()) {
3183             crc.update(buf.array(), buf.position() + buf.arrayOffset(), size);
3184         } else {
3185             int pos = buf.position();
3186             final int rdsize = Math.min(4096, size);
3187             byte bb[] = new byte[rdsize];
3188             int chk;
3189             for (int i = 0; i < size; i += chk) {
3190                 chk = Math.min(rdsize, size - i);
3191                 buf.get(bb, 0, chk);
3192                 crc.update(bb, 0, chk);
3193             }
3194             buf.position(pos);
3195         }
3196         return crc.getValue();
3197     }
3198 
3199     private static long checksum(ByteBuffer buf, int width, int height, int stride) {
3200         int cap = buf.capacity();
3201         assertTrue("checksum() params are invalid: w x h , s = "
3202                 + width + " x " + height + " , " + stride + " cap = " + cap,
3203                 width > 0 && width <= stride && height > 0 && height * stride <= cap);
3204         // YUV 4:2:0 should generally have a data storage height 1.5x greater
3205         // than the declared image height, representing the UV planes.
3206         //
3207         // We only check Y frame for now. Somewhat unknown with tiling effects.
3208         //
3209         //long tm = System.nanoTime();
3210         final int lineinterval = 1; // line sampling frequency
3211         CRC32 crc = new CRC32();
3212         if (buf.hasArray()) {
3213             byte b[] = buf.array();
3214             int offs = buf.arrayOffset();
3215             for (int i = 0; i < height; i += lineinterval) {
3216                 crc.update(b, i * stride + offs, width);
3217             }
3218         } else { // almost always ends up here due to direct buffers
3219             int pos = buf.position();
3220             if (true) { // this {} is 80x times faster than else {} below.
3221                 byte[] bb = new byte[width]; // local line buffer
3222                 for (int i = 0; i < height; i += lineinterval) {
3223                     buf.position(pos + i * stride);
3224                     buf.get(bb, 0, width);
3225                     crc.update(bb, 0, width);
3226                 }
3227             } else {
3228                 for (int i = 0; i < height; i += lineinterval) {
3229                     buf.position(pos + i * stride);
3230                     for (int j = 0; j < width; ++j) {
3231                         crc.update(buf.get());
3232                     }
3233                 }
3234             }
3235             buf.position(pos);
3236         }
3237         //tm = System.nanoTime() - tm;
3238         //Log.d(TAG, "checksum time " + tm);
3239         return crc.getValue();
3240     }
3241 
3242     private static long checksum(Image image) {
3243         int format = image.getFormat();
3244         assertEquals("unsupported image format", ImageFormat.YUV_420_888, format);
3245 
3246         CRC32 crc = new CRC32();
3247 
3248         int imageWidth = image.getWidth();
3249         int imageHeight = image.getHeight();
3250 
3251         Image.Plane[] planes = image.getPlanes();
3252         for (int i = 0; i < planes.length; ++i) {
3253             ByteBuffer buf = planes[i].getBuffer();
3254 
3255             int width, height, rowStride, pixelStride, x, y;
3256             rowStride = planes[i].getRowStride();
3257             pixelStride = planes[i].getPixelStride();
3258             if (i == 0) {
3259                 width = imageWidth;
3260                 height = imageHeight;
3261             } else {
3262                 width = imageWidth / 2;
3263                 height = imageHeight /2;
3264             }
3265             // local contiguous pixel buffer
3266             byte[] bb = new byte[width * height];
3267             if (buf.hasArray()) {
3268                 byte b[] = buf.array();
3269                 int offs = buf.arrayOffset();
3270                 if (pixelStride == 1) {
3271                     for (y = 0; y < height; ++y) {
3272                         System.arraycopy(bb, y * width, b, y * rowStride + offs, width);
3273                     }
3274                 } else {
3275                     // do it pixel-by-pixel
3276                     for (y = 0; y < height; ++y) {
3277                         int lineOffset = offs + y * rowStride;
3278                         for (x = 0; x < width; ++x) {
3279                             bb[y * width + x] = b[lineOffset + x * pixelStride];
3280                         }
3281                     }
3282                 }
3283             } else { // almost always ends up here due to direct buffers
3284                 int pos = buf.position();
3285                 if (pixelStride == 1) {
3286                     for (y = 0; y < height; ++y) {
3287                         buf.position(pos + y * rowStride);
3288                         buf.get(bb, y * width, width);
3289                     }
3290                 } else {
3291                     // local line buffer
3292                     byte[] lb = new byte[rowStride];
3293                     // do it pixel-by-pixel
3294                     for (y = 0; y < height; ++y) {
3295                         buf.position(pos + y * rowStride);
3296                         // we're only guaranteed to have pixelStride * (width - 1) + 1 bytes
3297                         buf.get(lb, 0, pixelStride * (width - 1) + 1);
3298                         for (x = 0; x < width; ++x) {
3299                             bb[y * width + x] = lb[x * pixelStride];
3300                         }
3301                     }
3302                 }
3303                 buf.position(pos);
3304             }
3305             crc.update(bb, 0, width * height);
3306         }
3307 
3308         return crc.getValue();
3309     }
3310 
3311     @ApiTest(apis = {"android.media.MediaCodec#flush"})
3312     @Test
3313     public void testFlush() throws Exception {
3314         testFlush("loudsoftwav.wav");
3315         testFlush("loudsoftogg.ogg");
3316         testFlush("loudsoftoggmkv.mkv");
3317         testFlush("loudsoftoggmp4.mp4");
3318         testFlush("loudsoftmp3.mp3");
3319         testFlush("loudsoftaac.aac");
3320         testFlush("loudsoftfaac.m4a");
3321         testFlush("loudsoftitunes.m4a");
3322     }
3323 
3324     private void testFlush(final String resource) throws Exception {
3325         MediaExtractor extractor;
3326         MediaCodec codec;
3327         ByteBuffer[] codecInputBuffers;
3328         ByteBuffer[] codecOutputBuffers;
3329 
3330         extractor = new MediaExtractor();
3331         extractor.setDataSource(mInpPrefix + resource);
3332 
3333         assertEquals("wrong number of tracks", 1, extractor.getTrackCount());
3334         MediaFormat format = extractor.getTrackFormat(0);
3335         String mime = format.getString(MediaFormat.KEY_MIME);
3336         assertTrue("not an audio file", mime.startsWith("audio/"));
3337 
3338         codec = MediaCodec.createDecoderByType(mime);
3339         assertNotNull("couldn't find codec " + mime, codec);
3340 
3341         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
3342         codec.start();
3343         codecInputBuffers = codec.getInputBuffers();
3344         codecOutputBuffers = codec.getOutputBuffers();
3345 
3346         extractor.selectTrack(0);
3347 
3348         // decode a bit of the first part of the file, and verify the amplitude
3349         short maxvalue1 = getAmplitude(extractor, codec);
3350 
3351         // flush the codec and seek the extractor a different position, then decode a bit more
3352         // and check the amplitude
3353         extractor.seekTo(8000000, 0);
3354         codec.flush();
3355         short maxvalue2 = getAmplitude(extractor, codec);
3356 
3357         assertTrue("first section amplitude too low", maxvalue1 > 20000);
3358         assertTrue("second section amplitude too high", maxvalue2 < 5000);
3359         codec.stop();
3360         codec.release();
3361 
3362     }
3363 
3364     private short getAmplitude(MediaExtractor extractor, MediaCodec codec) {
3365         short maxvalue = 0;
3366         int numBytesDecoded = 0;
3367         final long kTimeOutUs = 5000;
3368         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
3369         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
3370         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
3371 
3372         while(numBytesDecoded < 44100 * 2) {
3373             int inputBufIndex = codec.dequeueInputBuffer(kTimeOutUs);
3374 
3375             if (inputBufIndex >= 0) {
3376                 ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
3377 
3378                 int sampleSize = extractor.readSampleData(dstBuf, 0 /* offset */);
3379                 long presentationTimeUs = extractor.getSampleTime();
3380 
3381                 codec.queueInputBuffer(
3382                         inputBufIndex,
3383                         0 /* offset */,
3384                         sampleSize,
3385                         presentationTimeUs,
3386                         0 /* flags */);
3387 
3388                 extractor.advance();
3389             }
3390             int res = codec.dequeueOutputBuffer(info, kTimeOutUs);
3391 
3392             if (res >= 0) {
3393 
3394                 int outputBufIndex = res;
3395                 ByteBuffer buf = codecOutputBuffers[outputBufIndex];
3396 
3397                 buf.position(info.offset);
3398                 for (int i = 0; i < info.size; i += 2) {
3399                     short sample = buf.getShort();
3400                     if (maxvalue < sample) {
3401                         maxvalue = sample;
3402                     }
3403                     int idx = (numBytesDecoded + i) / 2;
3404                 }
3405 
3406                 numBytesDecoded += info.size;
3407 
3408                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
3409             } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
3410                 codecOutputBuffers = codec.getOutputBuffers();
3411             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
3412                 MediaFormat oformat = codec.getOutputFormat();
3413             }
3414         }
3415         return maxvalue;
3416     }
3417 
3418     /* return true if a particular video feature is supported for the given mimetype */
3419     private boolean isVideoFeatureSupported(String mimeType, String feature) {
3420         MediaFormat format = MediaFormat.createVideoFormat( mimeType, 1920, 1080);
3421         format.setFeatureEnabled(feature, true);
3422         MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
3423         String codecName = mcl.findDecoderForFormat(format);
3424         return (codecName == null) ? false : true;
3425     }
3426 
3427     /**
3428      * Test tunneled video playback mode if supported
3429      *
3430      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3431      */
3432     private void tunneledVideoPlayback(String mimeType, String videoName) throws Exception {
3433         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3434                     "No tunneled video playback codec found for MIME " + mimeType)) {
3435             return;
3436         }
3437 
3438         AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
3439         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3440                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3441 
3442         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3443         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3444         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3445         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3446         mMediaCodecPlayer.startCodec();
3447 
3448         // When video codecs are started, large chunks of contiguous physical memory need to be
3449         // allocated, which, on low-RAM devices, can trigger high CPU usage for moving memory
3450         // around to create contiguous space for the video decoder. This can cause an increase in
3451         // startup time for playback.
3452         ActivityManager activityManager = mContext.getSystemService(ActivityManager.class);
3453         long firstFrameRenderedTimeoutSeconds = activityManager.isLowRamDevice() ? 3 : 1;
3454 
3455         mMediaCodecPlayer.play();
3456         sleepUntil(() ->
3457                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3458                 && mMediaCodecPlayer.getTimestamp() != null
3459                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3460                 Duration.ofSeconds(firstFrameRenderedTimeoutSeconds));
3461         assertNotEquals("onFrameRendered was not called",
3462                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3463         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3464         assertNotEquals("Audio timestamp has a zero frame position",
3465                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3466 
3467         final long durationMs = mMediaCodecPlayer.getDuration();
3468         final long timeOutMs = System.currentTimeMillis() + durationMs + 5 * 1000; // add 5 sec
3469         while (!mMediaCodecPlayer.isEnded()) {
3470             assertTrue("Tunneled video playback timeout exceeded",
3471                     timeOutMs > System.currentTimeMillis());
3472             Thread.sleep(SLEEP_TIME_MS);
3473             if (mMediaCodecPlayer.getCurrentPosition() >= mMediaCodecPlayer.getDuration()) {
3474                 Log.d(TAG, "testTunneledVideoPlayback -- current pos = " +
3475                         mMediaCodecPlayer.getCurrentPosition() +
3476                         ">= duration = " + mMediaCodecPlayer.getDuration());
3477                 break;
3478             }
3479         }
3480         // mMediaCodecPlayer.reset() handled in TearDown();
3481     }
3482 
3483     /**
3484      * Test tunneled video playback mode with HEVC if supported
3485      */
3486     @Test
3487     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3488     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3489     public void testTunneledVideoPlaybackHevc() throws Exception {
3490         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_HEVC,
3491                     "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3492     }
3493 
3494     /**
3495      * Test tunneled video playback mode with AVC if supported
3496      */
3497     @Test
3498     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3499     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3500     public void testTunneledVideoPlaybackAvc() throws Exception {
3501         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_AVC,
3502                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3503     }
3504 
3505     /**
3506      * Test tunneled video playback mode with VP9 if supported
3507      */
3508     @Test
3509     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3510     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3511     public void testTunneledVideoPlaybackVp9() throws Exception {
3512         tunneledVideoPlayback(MediaFormat.MIMETYPE_VIDEO_VP9,
3513                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3514     }
3515 
3516     /**
3517      * Test tunneled video playback flush if supported
3518      *
3519      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3520      */
3521     private void testTunneledVideoFlush(String mimeType, String videoName) throws Exception {
3522         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3523                     "No tunneled video playback codec found for MIME " + mimeType)) {
3524             return;
3525         }
3526 
3527         AudioManager am = (AudioManager)mContext.getSystemService(Context.AUDIO_SERVICE);
3528         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3529                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3530 
3531         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3532         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3533         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3534         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3535         mMediaCodecPlayer.startCodec();
3536 
3537         mMediaCodecPlayer.play();
3538         sleepUntil(() ->
3539                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3540                 && mMediaCodecPlayer.getTimestamp() != null
3541                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3542                 Duration.ofSeconds(1));
3543         assertNotEquals("onFrameRendered was not called",
3544                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3545         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3546         assertNotEquals("Audio timestamp has a zero frame position",
3547                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3548 
3549         mMediaCodecPlayer.pause();
3550         mMediaCodecPlayer.flush();
3551         // mMediaCodecPlayer.reset() handled in TearDown();
3552     }
3553 
3554     /**
3555      * Test tunneled video playback flush with HEVC if supported
3556      */
3557     @Test
3558     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3559     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3560     public void testTunneledVideoFlushHevc() throws Exception {
3561         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
3562                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3563     }
3564 
3565     /**
3566      * Test tunneled video playback flush with AVC if supported
3567      */
3568     @Test
3569     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3570     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3571     public void testTunneledVideoFlushAvc() throws Exception {
3572         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
3573                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3574     }
3575 
3576     /**
3577      * Test tunneled video playback flush with VP9 if supported
3578      */
3579     @Test
3580     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3581     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3582     public void testTunneledVideoFlushVp9() throws Exception {
3583         testTunneledVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
3584                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3585     }
3586 
3587     /**
3588      * Test that the first frame is rendered when video peek is on in tunneled mode.
3589      *
3590      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3591      */
3592     private void testTunneledVideoPeekOn(String mimeType, String videoName, float frameRate)
3593             throws Exception {
3594         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3595                     "No tunneled video playback codec found for MIME " + mimeType)) {
3596             return;
3597         }
3598 
3599         // Setup tunnel mode test media player
3600         AudioManager am = mContext.getSystemService(AudioManager.class);
3601         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3602                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3603 
3604         // Frame rate is needed by some devices to initialize the display hardware
3605         mMediaCodecPlayer.setFrameRate(frameRate);
3606 
3607         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3608         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3609         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3610         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3611         mMediaCodecPlayer.startCodec();
3612         mMediaCodecPlayer.setVideoPeek(true); // Enable video peek
3613 
3614         // Queue the first video frame, which should not be rendered imminently
3615         mMediaCodecPlayer.queueOneVideoFrame();
3616 
3617         // Assert that onFirstTunnelFrameReady is called
3618         final int waitForFrameReadyMs = 150;
3619         Thread.sleep(waitForFrameReadyMs);
3620         assertTrue(String.format("onFirstTunnelFrameReady not called within %d milliseconds",
3621                         waitForFrameReadyMs),
3622                 mMediaCodecPlayer.isFirstTunnelFrameReady());
3623 
3624         // This is long due to high-latency display pipelines on TV devices
3625         final int waitForRenderingMs = 1000;
3626         Thread.sleep(waitForRenderingMs);
3627 
3628         // Assert that video peek is enabled and working
3629         assertNotEquals(String.format("First frame not rendered within %d milliseconds",
3630                         waitForRenderingMs), CodecState.UNINITIALIZED_TIMESTAMP,
3631                 mMediaCodecPlayer.getCurrentPosition());
3632 
3633         // mMediaCodecPlayer.reset() handled in TearDown();
3634     }
3635 
3636     /**
3637      * Test that the first frame is rendered when video peek is on for HEVC in tunneled mode.
3638      */
3639     @Test
3640     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3641     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3642     public void testTunneledVideoPeekOnHevc() throws Exception {
3643         // Requires vendor support of the TUNNEL_PEEK feature
3644         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3645                 IS_VENDOR_AT_LEAST_S);
3646         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_HEVC,
3647                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 25);
3648     }
3649 
3650     /**
3651      * Test that the first frame is rendered when video peek is on for AVC in tunneled mode.
3652      */
3653     @Test
3654     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3655     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3656     public void testTunneledVideoPeekOnAvc() throws Exception {
3657         // Requires vendor support of the TUNNEL_PEEK feature
3658         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3659                 IS_VENDOR_AT_LEAST_S);
3660         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_AVC,
3661                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 25);
3662     }
3663 
3664     /**
3665      * Test that the first frame is rendered when video peek is on for VP9 in tunneled mode.
3666      */
3667     @Test
3668     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3669     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3670     public void testTunneledVideoPeekOnVp9() throws Exception {
3671         // Requires vendor support of the TUNNEL_PEEK feature
3672         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3673                 IS_VENDOR_AT_LEAST_S);
3674         testTunneledVideoPeekOn(MediaFormat.MIMETYPE_VIDEO_VP9,
3675                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3676                 30);
3677     }
3678 
3679     /**
3680      * Test that peek off doesn't render the first frame until turned on in tunneled mode.
3681      *
3682      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
3683      */
3684     private void testTunneledVideoPeekOff(String mimeType, String videoName, float frameRate)
3685             throws Exception {
3686         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3687                     "No tunneled video playback codec found for MIME " + mimeType)) {
3688             return;
3689         }
3690 
3691         // Setup tunnel mode test media player
3692         AudioManager am = mContext.getSystemService(AudioManager.class);
3693         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
3694                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3695 
3696         // Frame rate is needed by some devices to initialize the display hardware
3697         mMediaCodecPlayer.setFrameRate(frameRate);
3698 
3699         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
3700         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3701         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3702         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3703         mMediaCodecPlayer.startCodec();
3704         mMediaCodecPlayer.setVideoPeek(false); // Disable video peek
3705 
3706         // Queue the first video frame, which should not be rendered yet
3707         mMediaCodecPlayer.queueOneVideoFrame();
3708 
3709         // Assert that onFirstTunnelFrameReady is called
3710         final int waitForFrameReadyMs = 150;
3711         Thread.sleep(waitForFrameReadyMs);
3712         assertTrue(String.format("onFirstTunnelFrameReady not called within %d milliseconds",
3713                         waitForFrameReadyMs),
3714                 mMediaCodecPlayer.isFirstTunnelFrameReady());
3715 
3716         // This is long due to high-latency display pipelines on TV devices
3717         final int waitForRenderingMs = 1000;
3718         Thread.sleep(waitForRenderingMs);
3719 
3720         // Assert the video frame has not been peeked yet
3721         assertEquals("First frame rendered while peek disabled", CodecState.UNINITIALIZED_TIMESTAMP,
3722                 mMediaCodecPlayer.getCurrentPosition());
3723 
3724         // Enable video peek
3725         mMediaCodecPlayer.setVideoPeek(true);
3726         Thread.sleep(waitForRenderingMs);
3727 
3728         // Assert that the first frame was rendered
3729         assertNotEquals(String.format(
3730                         "First frame not rendered within %d milliseconds after peek is enabled",
3731                         waitForRenderingMs), CodecState.UNINITIALIZED_TIMESTAMP,
3732                 mMediaCodecPlayer.getCurrentPosition());
3733 
3734         // mMediaCodecPlayer.reset() handled in TearDown();
3735     }
3736 
3737     /**
3738      * Test that peek off doesn't render the first frame until turned on for HEC in tunneled mode.
3739      */
3740     @Test
3741     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3742     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3743     public void testTunneledVideoPeekOffHevc() throws Exception {
3744         // Requires vendor support of the TUNNEL_PEEK feature
3745         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3746                 IS_VENDOR_AT_LEAST_S);
3747         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_HEVC,
3748                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv", 25);
3749     }
3750 
3751     /**
3752      * Test that peek off doesn't render the first frame until turned on for AVC in tunneled mode.
3753      */
3754     @Test
3755     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3756     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3757     public void testTunneledVideoPeekOffAvc() throws Exception {
3758         // Requires vendor support of the TUNNEL_PEEK feature
3759         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3760                 IS_VENDOR_AT_LEAST_S);
3761         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_AVC,
3762                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4", 25);
3763     }
3764 
3765     /**
3766      * Test that peek off doesn't render the first frame until turned on for VP9 in tunneled mode.
3767      */
3768     @Test
3769     @ApiTest(apis={"android.media.MediaCodec#PARAMETER_KEY_TUNNEL_PEEK"})
3770     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3771     public void testTunneledVideoPeekOffVp9() throws Exception {
3772         // Requires vendor support of the TUNNEL_PEEK feature
3773         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
3774                 IS_VENDOR_AT_LEAST_S);
3775         testTunneledVideoPeekOff(MediaFormat.MIMETYPE_VIDEO_VP9,
3776                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm",
3777                 30);
3778     }
3779 
3780    /**
3781     * Test that audio timestamps don't progress during audio PTS gaps in tunneled mode.
3782     *
3783     * See: https://source.android.com/docs/devices/tv/multimedia-tunneling#behavior
3784     */
3785    private void testTunneledAudioProgressWithPtsGaps(String mimeType, String fileName)
3786             throws Exception {
3787         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3788                     "No tunneled video playback codec found for MIME " + mimeType)) {
3789             return;
3790         }
3791 
3792         AudioManager am = mContext.getSystemService(AudioManager.class);
3793 
3794         mMediaCodecPlayer = new MediaCodecTunneledPlayer(mContext,
3795                 getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3796 
3797         final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
3798         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3799         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3800         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3801         mMediaCodecPlayer.startCodec();
3802 
3803         mMediaCodecPlayer.play();
3804         sleepUntil(() ->
3805                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3806                 && mMediaCodecPlayer.getTimestamp() != null
3807                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3808                 Duration.ofSeconds(1));
3809         assertNotEquals("onFrameRendered was not called",
3810                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3811         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3812         assertNotEquals("Audio timestamp has a zero frame position",
3813                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3814 
3815         // After 100 ms of playback, simulate a PTS gap of 100 ms
3816         Thread.sleep(100);
3817         mMediaCodecPlayer.setAudioTrackOffsetNs(100L * 1000000);
3818 
3819         // Verify that at some point in time in the future, the framePosition stopped advancing.
3820         // This should happen when the PTS gap is encountered - silence is rendered to fill the
3821         // PTS gap, but this silence should not cause framePosition to advance.
3822         {
3823             final long ptsGapTimeoutMs = 3000;
3824             long startTimeMs = System.currentTimeMillis();
3825             AudioTimestamp previousTimestamp;
3826             do {
3827                 assertTrue(String.format("No audio PTS gap after %d milliseconds", ptsGapTimeoutMs),
3828                         System.currentTimeMillis() - startTimeMs < ptsGapTimeoutMs);
3829                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3830                 Thread.sleep(50);
3831             } while (mMediaCodecPlayer.getTimestamp().framePosition
3832                     != previousTimestamp.framePosition);
3833         }
3834 
3835         // Allow the playback to advance past the PTS gap and back to normal operation
3836         Thread.sleep(500);
3837 
3838         // Sleep till framePosition stabilizes, i.e. playback is complete
3839         {
3840             long endOfPlayackTimeoutMs = 20000;
3841             long startTimeMs = System.currentTimeMillis();
3842             AudioTimestamp previousTimestamp;
3843             do {
3844                 assertTrue(String.format("No end of playback after %d milliseconds",
3845                                 endOfPlayackTimeoutMs),
3846                         System.currentTimeMillis() - startTimeMs < endOfPlayackTimeoutMs);
3847                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3848                 Thread.sleep(100);
3849             } while (mMediaCodecPlayer.getTimestamp().framePosition
3850                     != previousTimestamp.framePosition);
3851         }
3852 
3853         // Verify if number of frames written and played are same even if PTS gaps were present
3854         // in the playback.
3855         assertEquals("Number of frames written != Number of frames played",
3856                 mMediaCodecPlayer.getAudioFramesWritten(),
3857                 mMediaCodecPlayer.getTimestamp().framePosition);
3858     }
3859 
3860     /**
3861      * Test that audio timestamps don't progress during audio PTS gaps for HEVC in tunneled mode.
3862      */
3863     @Test
3864     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3865     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3866     public void testTunneledAudioProgressWithPtsGapsHevc() throws Exception {
3867         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_HEVC,
3868                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3869     }
3870 
3871     /**
3872      * Test that audio timestamps don't progress during audio PTS gaps for AVC in tunneled mode.
3873      */
3874     @Test
3875     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3876     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3877     public void testTunneledAudioProgressWithPtsGapsAvc() throws Exception {
3878         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_AVC,
3879                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
3880     }
3881 
3882     /**
3883      * Test that audio timestamps don't progress during audio PTS gaps for VP9 in tunneled mode.
3884      */
3885     @Test
3886     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3887     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3888     public void testTunneledAudioProgressWithPtsGapsVp9() throws Exception {
3889         testTunneledAudioProgressWithPtsGaps(MediaFormat.MIMETYPE_VIDEO_VP9,
3890                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
3891     }
3892 
3893     /**
3894      * Test that audio timestamps stop progressing during underrun in tunneled mode.
3895      *
3896      * See: https://source.android.com/docs/devices/tv/multimedia-tunneling#behavior
3897      */
3898     private void testTunneledAudioProgressWithUnderrun(String mimeType, String fileName)
3899             throws Exception {
3900         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
3901                 "No tunneled video playback codec found for MIME " + mimeType)) {
3902             return;
3903         }
3904 
3905         AudioManager am = mContext.getSystemService(AudioManager.class);
3906 
3907         mMediaCodecPlayer = new MediaCodecTunneledPlayer(mContext,
3908                 getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
3909 
3910         final Uri mediaUri = Uri.fromFile(new File(mInpPrefix, fileName));
3911         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
3912         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
3913         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
3914         mMediaCodecPlayer.startCodec();
3915 
3916         mMediaCodecPlayer.play();
3917         sleepUntil(() ->
3918                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
3919                 && mMediaCodecPlayer.getTimestamp() != null
3920                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
3921                 Duration.ofSeconds(1));
3922         assertNotEquals("onFrameRendered was not called",
3923                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
3924         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
3925         assertNotEquals("Audio timestamp has a zero frame position",
3926                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
3927 
3928         // After 200 ms of playback, stop writing to the AudioTrack to simulate underrun
3929         Thread.sleep(200);
3930         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
3931 
3932         // Sleep till framePosition stabilizes, i.e. AudioTrack is in an underrun condition
3933         {
3934             long endOfPlayackTimeoutMs = 3000;
3935             long startTimeMs = System.currentTimeMillis();
3936             AudioTimestamp previousTimestamp;
3937             do {
3938                 assertTrue(String.format("No underrun after %d milliseconds",
3939                                 endOfPlayackTimeoutMs),
3940                         System.currentTimeMillis() - startTimeMs < endOfPlayackTimeoutMs);
3941                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3942                 Thread.sleep(100);
3943             } while (mMediaCodecPlayer.getTimestamp().framePosition
3944                     != previousTimestamp.framePosition);
3945         }
3946 
3947         // After 200 ms of starving the AudioTrack, resume writing
3948         Thread.sleep(200);
3949         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(false);
3950 
3951         // Wait until underrun recovers, otherwise false detection of end of playback occurs
3952         {
3953             long underrunRecoveryTimeoutMs = 200;
3954             long startTimeMs = System.currentTimeMillis();
3955             AudioTimestamp previousTimestamp;
3956             do {
3957               assertTrue(String.format("No underrun recovery after %d milliseconds",
3958                               underrunRecoveryTimeoutMs),
3959                       System.currentTimeMillis() - startTimeMs < underrunRecoveryTimeoutMs);
3960               previousTimestamp = mMediaCodecPlayer.getTimestamp();
3961               Thread.sleep(50);
3962             } while (mMediaCodecPlayer.getTimestamp().framePosition
3963                     == previousTimestamp.framePosition);
3964         }
3965 
3966         // Sleep till framePosition stabilizes, i.e. playback is complete
3967         {
3968             long endOfPlayackTimeoutMs = 20000;
3969             long startTimeMs = System.currentTimeMillis();
3970             AudioTimestamp previousTimestamp;
3971             do {
3972                 assertTrue(String.format("No end of playback after %d milliseconds",
3973                                 endOfPlayackTimeoutMs),
3974                         System.currentTimeMillis() - startTimeMs < endOfPlayackTimeoutMs);
3975                 previousTimestamp = mMediaCodecPlayer.getTimestamp();
3976                 Thread.sleep(100);
3977             } while (mMediaCodecPlayer.getTimestamp().framePosition
3978                     != previousTimestamp.framePosition);
3979         }
3980 
3981         // Verify if number of frames written and played are same even if an underrun condition
3982         // occurs.
3983         assertEquals("Number of frames written != Number of frames played",
3984                 mMediaCodecPlayer.getAudioFramesWritten(),
3985                 mMediaCodecPlayer.getTimestamp().framePosition);
3986     }
3987 
3988     /**
3989      * Test that audio timestamps stop progressing during underrun for HEVC in tunneled mode.
3990      */
3991     @Test
3992     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
3993     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
3994     public void testTunneledAudioProgressWithUnderrunHevc() throws Exception {
3995         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
3996                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
3997     }
3998 
3999     /**
4000      * Test that audio timestamps stop progressing during underrun for AVC in tunneled mode.
4001      */
4002     @Test
4003     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4004     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4005     public void testTunneledAudioProgressWithUnderrunAvc() throws Exception {
4006         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
4007                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4008     }
4009 
4010     /**
4011      * Test that audio timestamps stop progressing during underrun for VP9 in tunneled mode.
4012      */
4013     @Test
4014     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4015     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4016     public void testTunneledAudioProgressWithUnderrunVp9() throws Exception {
4017         testTunneledAudioProgressWithUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
4018                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4019     }
4020 
4021     /**
4022      * Test accurate video rendering after a flush in tunneled mode.
4023      *
4024      * Test On some devices, queuing content when the player is paused, then triggering a flush,
4025      * then queuing more content does not behave as expected. The queued content gets lost and the
4026      * flush is really only applied once playback has resumed.
4027      *
4028      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
4029      */
4030     private void testTunneledAccurateVideoFlush(String mimeType, String videoName)
4031             throws Exception {
4032         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
4033                     "No tunneled video playback codec found for MIME " + mimeType)) {
4034             return;
4035         }
4036 
4037         // Below are some timings used throughout this test.
4038         //
4039         // Maximum allowed time between start of playback and first frame displayed
4040         final long maxAllowedTimeToFirstFrameMs = 500;
4041         // Maximum allowed time between issuing a pause and the last frame being displayed
4042         final long maxDrainTimeMs = 200;
4043 
4044         // Setup tunnel mode test media player
4045         AudioManager am = mContext.getSystemService(AudioManager.class);
4046         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
4047                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
4048 
4049         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
4050         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
4051         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
4052         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
4053         mMediaCodecPlayer.startCodec();
4054         // Video peek might interfere with the test: we want to ensure that queuing more data during
4055         // a pause does not cause displaying more video frames, which is precisely what video peek
4056         // does.
4057         mMediaCodecPlayer.setVideoPeek(false);
4058 
4059         mMediaCodecPlayer.play();
4060         sleepUntil(() ->
4061                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
4062                 && mMediaCodecPlayer.getTimestamp() != null
4063                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
4064                 Duration.ofSeconds(1));
4065         assertNotEquals("onFrameRendered was not called",
4066                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
4067         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
4068         assertNotEquals("Audio timestamp has a zero frame position",
4069                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
4070 
4071         // Allow some time for playback to commence
4072         Thread.sleep(500);
4073 
4074         // Pause playback
4075         mMediaCodecPlayer.pause();
4076 
4077         // Wait for audio to pause
4078         AudioTimestamp pauseAudioTimestamp;
4079         {
4080             AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4081             long startTimeMs = System.currentTimeMillis();
4082             do {
4083                 // If it takes longer to pause, the UX won't feel responsive to the user
4084                 int audioPauseTimeoutMs = 250;
4085                 assertTrue(String.format("No audio pause after %d milliseconds",
4086                                 audioPauseTimeoutMs),
4087                         System.currentTimeMillis() - startTimeMs < audioPauseTimeoutMs);
4088                 pauseAudioTimestamp = currentAudioTimestamp;
4089                 Thread.sleep(50);
4090                 currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4091             } while (currentAudioTimestamp.framePosition != pauseAudioTimestamp.framePosition);
4092         }
4093         long pauseAudioSystemTimeMs = pauseAudioTimestamp.nanoTime / 1000 / 1000;
4094 
4095         // Wait for video to pause
4096         long pauseVideoSystemTimeNs;
4097         long pauseVideoPositionUs;
4098         {
4099             long currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
4100             long startTimeMs = System.currentTimeMillis();
4101             do {
4102                 int videoUnderrunTimeoutMs = 2000;
4103                 assertTrue(String.format("No video pause after %d milliseconds",
4104                                 videoUnderrunTimeoutMs),
4105                         System.currentTimeMillis() - startTimeMs < videoUnderrunTimeoutMs);
4106                 pauseVideoSystemTimeNs = currentVideoSystemTimeNs;
4107                 Thread.sleep(250); // onFrameRendered can get delayed in the Framework
4108                 currentVideoSystemTimeNs = mMediaCodecPlayer.getCurrentRenderedSystemTimeNano();
4109             } while (currentVideoSystemTimeNs != pauseVideoSystemTimeNs);
4110             pauseVideoPositionUs = mMediaCodecPlayer.getVideoTimeUs();
4111         }
4112         long pauseVideoSystemTimeMs = pauseVideoSystemTimeNs / 1000 / 1000;
4113 
4114         // Video should not continue running for a long period of time after audio pauses
4115         long pauseVideoToleranceMs = 500;
4116         assertTrue(String.format(
4117                         "Video ran %d milliseconds longer than audio (video:%d audio:%d)",
4118                         pauseVideoToleranceMs, pauseVideoSystemTimeMs, pauseAudioSystemTimeMs),
4119                 pauseVideoSystemTimeMs - pauseAudioSystemTimeMs < pauseVideoToleranceMs);
4120 
4121         // Verify that playback stays paused
4122         Thread.sleep(500);
4123         assertEquals(mMediaCodecPlayer.getTimestamp().framePosition,
4124                 pauseAudioTimestamp.framePosition);
4125         assertEquals(mMediaCodecPlayer.getCurrentRenderedSystemTimeNano(), pauseVideoSystemTimeNs);
4126         assertEquals(mMediaCodecPlayer.getVideoTimeUs(), pauseVideoPositionUs);
4127 
4128         // Verify audio and video are roughly in sync when paused
4129         long framePosition = mMediaCodecPlayer.getTimestamp().framePosition;
4130         long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
4131         long pauseAudioPositionMs = pauseAudioTimestamp.framePosition * 1000 / playbackRateFps;
4132         long pauseVideoPositionMs = pauseVideoPositionUs / 1000;
4133         long deltaMs = pauseVideoPositionMs - pauseAudioPositionMs;
4134         assertTrue(String.format(
4135                         "Video is %d milliseconds out of sync from audio (video:%d audio:%d)",
4136                         deltaMs, pauseVideoPositionMs, pauseAudioPositionMs),
4137                 deltaMs > -80 && deltaMs < pauseVideoToleranceMs);
4138 
4139         // Flush both audio and video pipelines
4140         mMediaCodecPlayer.flush();
4141 
4142         // The flush should not cause any frame to be displayed.
4143         // Wait for the max startup latency to see if one (incorrectly) arrives.
4144         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4145         assertEquals("Video frame rendered after flush", mMediaCodecPlayer.getVideoTimeUs(),
4146                 CodecState.UNINITIALIZED_TIMESTAMP);
4147 
4148         // Ensure video peek is disabled before queuing the next frame, otherwise it will
4149         // automatically be rendered when queued.
4150         mMediaCodecPlayer.setVideoPeek(false);
4151 
4152         // We rewind to the beginning of the stream (to a key frame) and queue one frame, but
4153         // pretend like we're seeking 1 second forward in the stream.
4154         long presentationTimeOffsetUs = pauseVideoPositionUs + 1000 * 1000;
4155         mMediaCodecPlayer.seekToBeginning(presentationTimeOffsetUs);
4156         Long queuedVideoTimestamp = mMediaCodecPlayer.queueOneVideoFrame();
4157         assertNotNull("Failed to queue a video frame", queuedVideoTimestamp);
4158 
4159         // The enqueued frame should not be rendered while we're paused.
4160         // Wait for the max startup latency to see if it (incorrectly) arrives.
4161         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4162         assertEquals("Video frame rendered during pause", mMediaCodecPlayer.getVideoTimeUs(),
4163                 CodecState.UNINITIALIZED_TIMESTAMP);
4164 
4165         // Resume playback
4166         mMediaCodecPlayer.resume();
4167         Thread.sleep(maxAllowedTimeToFirstFrameMs);
4168         // Verify that the first rendered frame was the first queued frame
4169         ImmutableList<Long> renderedVideoTimestamps =
4170                 mMediaCodecPlayer.getRenderedVideoFrameTimestampList();
4171         assertFalse(String.format("No frame rendered after resume within %d ms",
4172                         maxAllowedTimeToFirstFrameMs), renderedVideoTimestamps.isEmpty());
4173         assertEquals("First rendered video frame does not match first queued video frame",
4174                 renderedVideoTimestamps.get(0), queuedVideoTimestamp);
4175         // mMediaCodecPlayer.reset() handled in TearDown();
4176     }
4177 
4178     /**
4179      * Test accurate video rendering after a video MediaCodec flush with HEVC if supported
4180      */
4181     @Test
4182     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4183     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4184     public void testTunneledAccurateVideoFlushHevc() throws Exception {
4185         // Requires vendor changes to support this.
4186         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
4187                 IS_VENDOR_AT_LEAST_S);
4188         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_HEVC,
4189                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4190     }
4191 
4192     /**
4193      * Test accurate video rendering after a video MediaCodec flush with AVC if supported
4194      */
4195     @Test
4196     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4197     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4198     public void testTunneledAccurateVideoFlushAvc() throws Exception {
4199         // Requires vendor changes to support this.
4200         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
4201                 IS_VENDOR_AT_LEAST_S);
4202         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_AVC,
4203                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4204     }
4205 
4206     /**
4207      * Test accurate video rendering after a video MediaCodec flush with VP9 if supported
4208      */
4209     @Test
4210     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4211     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4212     public void testTunneledAccurateVideoFlushVp9() throws Exception {
4213         // Requires vendor changes to support this.
4214         Assume.assumeTrue("Vendor API level is not Android 12 or later.",
4215                 IS_VENDOR_AT_LEAST_S);
4216         testTunneledAccurateVideoFlush(MediaFormat.MIMETYPE_VIDEO_VP9,
4217                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4218     }
4219 
4220     /**
4221      * Test that audio timestamps stop progressing during pause in tunneled mode.
4222      */
4223     private void testTunneledAudioProgressWithPause(String mimeType, String videoName)
4224             throws Exception {
4225         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
4226                     "No tunneled video playback codec found for MIME " + mimeType)) {
4227             return;
4228         }
4229 
4230         AudioManager am = mContext.getSystemService(AudioManager.class);
4231         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
4232                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
4233 
4234         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
4235         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
4236         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
4237         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
4238         mMediaCodecPlayer.startCodec();
4239 
4240         mMediaCodecPlayer.play();
4241         sleepUntil(() ->
4242                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
4243                 && mMediaCodecPlayer.getTimestamp() != null
4244                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
4245                 Duration.ofSeconds(1));
4246         long firstVideoPosition = mMediaCodecPlayer.getVideoTimeUs();
4247         assertNotEquals("onFrameRendered was not called",
4248                 firstVideoPosition, CodecState.UNINITIALIZED_TIMESTAMP);
4249         AudioTimestamp firstAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4250         assertNotEquals("Audio timestamp is null", firstAudioTimestamp, null);
4251         assertNotEquals("Audio timestamp has a zero frame position",
4252                 firstAudioTimestamp.framePosition, 0);
4253 
4254         // Expected stabilization wait is 60ms. We triple to 180ms to prevent flakiness
4255         // and still test basic functionality.
4256         final int sleepTimeMs = 180;
4257         Thread.sleep(sleepTimeMs);
4258         mMediaCodecPlayer.pause();
4259         // pause might take some time to ramp volume down.
4260         Thread.sleep(sleepTimeMs);
4261         AudioTimestamp audioTimestampAfterPause = mMediaCodecPlayer.getTimestamp();
4262         // Verify the video has advanced beyond the first position.
4263         assertTrue(mMediaCodecPlayer.getVideoTimeUs() > firstVideoPosition);
4264         // Verify that the timestamp has advanced beyond the first timestamp.
4265         assertTrue(audioTimestampAfterPause.nanoTime > firstAudioTimestamp.nanoTime);
4266 
4267         Thread.sleep(sleepTimeMs);
4268         // Verify that the timestamp does not advance after pause.
4269         assertEquals(audioTimestampAfterPause.nanoTime, mMediaCodecPlayer.getTimestamp().nanoTime);
4270     }
4271 
4272 
4273     /**
4274      * Test that audio timestamps stop progressing during pause for HEVC in tunneled mode.
4275      */
4276     @Test
4277     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4278     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4279     public void testTunneledAudioProgressWithPauseHevc() throws Exception {
4280         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_HEVC,
4281                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4282     }
4283 
4284     /**
4285      * Test that audio timestamps stop progressing during pause for AVC in tunneled mode.
4286      */
4287     @Test
4288     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4289     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4290     public void testTunneledAudioProgressWithPauseAvc() throws Exception {
4291         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_AVC,
4292                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4293     }
4294 
4295     /**
4296      * Test that audio timestamps stop progressing during pause for VP9 in tunneled mode.
4297      */
4298     @Test
4299     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4300     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4301     public void testTunneledAudioProgressWithPauseVp9() throws Exception {
4302         testTunneledAudioProgressWithPause(MediaFormat.MIMETYPE_VIDEO_VP9,
4303                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4304     }
4305 
4306     /**
4307      * Test that audio underrun pauses video and resumes in-sync in tunneled mode.
4308      *
4309      * TODO(b/182915887): Test all the codecs advertised by the DUT for the provided test content
4310      */
4311     private void tunneledAudioUnderrun(String mimeType, String videoName)
4312             throws Exception {
4313         if (!MediaUtils.check(isVideoFeatureSupported(mimeType, FEATURE_TunneledPlayback),
4314                 "No tunneled video playback codec found for MIME " + mimeType)) {
4315             return;
4316         }
4317 
4318         AudioManager am = mContext.getSystemService(AudioManager.class);
4319         mMediaCodecPlayer = new MediaCodecTunneledPlayer(
4320                 mContext, getActivity().getSurfaceHolder(), true, am.generateAudioSessionId());
4321 
4322         Uri mediaUri = Uri.fromFile(new File(mInpPrefix, videoName));
4323         mMediaCodecPlayer.setAudioDataSource(mediaUri, null);
4324         mMediaCodecPlayer.setVideoDataSource(mediaUri, null);
4325         assertTrue("MediaCodecPlayer.prepare() failed!", mMediaCodecPlayer.prepare());
4326         mMediaCodecPlayer.startCodec();
4327 
4328         mMediaCodecPlayer.play();
4329         sleepUntil(() ->
4330                 mMediaCodecPlayer.getCurrentPosition() > CodecState.UNINITIALIZED_TIMESTAMP
4331                 && mMediaCodecPlayer.getTimestamp() != null
4332                 && mMediaCodecPlayer.getTimestamp().framePosition > 0,
4333                 Duration.ofSeconds(1));
4334         assertNotEquals("onFrameRendered was not called",
4335                 mMediaCodecPlayer.getVideoTimeUs(), CodecState.UNINITIALIZED_TIMESTAMP);
4336         assertNotEquals("Audio timestamp is null", mMediaCodecPlayer.getTimestamp(), null);
4337         assertNotEquals("Audio timestamp has a zero frame position",
4338                 mMediaCodecPlayer.getTimestamp().framePosition, 0);
4339 
4340         // Simulate underrun by starving the audio track of data
4341         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(true);
4342 
4343         // Wait for audio underrun
4344         AudioTimestamp underrunAudioTimestamp;
4345         {
4346             AudioTimestamp currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4347             long startTimeMs = System.currentTimeMillis();
4348             do {
4349                 int audioUnderrunTimeoutMs = 1000;
4350                 assertTrue(String.format("No audio underrun after %d milliseconds",
4351                                 System.currentTimeMillis() - startTimeMs),
4352                         System.currentTimeMillis() - startTimeMs < audioUnderrunTimeoutMs);
4353                 underrunAudioTimestamp = currentAudioTimestamp;
4354                 Thread.sleep(50);
4355                 currentAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4356             } while (currentAudioTimestamp.framePosition != underrunAudioTimestamp.framePosition);
4357         }
4358 
4359         // Wait until video playback pauses due to underrunning audio
4360         long pausedVideoTimeUs = -1;
4361         {
4362             long currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4363             long startTimeMs = System.currentTimeMillis();
4364             do {
4365                 int videoPauseTimeoutMs = 2000;
4366                 assertTrue(String.format("No video pause after %d milliseconds",
4367                                 videoPauseTimeoutMs),
4368                         System.currentTimeMillis() - startTimeMs < videoPauseTimeoutMs);
4369                 pausedVideoTimeUs = currentVideoTimeUs;
4370                 Thread.sleep(250); // onFrameRendered messages can get delayed in the Framework
4371                 currentVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4372             } while (currentVideoTimeUs != pausedVideoTimeUs);
4373         }
4374 
4375         // Retrieve index for the video rendered frame at the time of video pausing
4376         int pausedVideoRenderedTimestampIndex =
4377                 mMediaCodecPlayer.getRenderedVideoFrameTimestampList().size() - 1;
4378 
4379         // Resume audio playback with a negative offset, in order to simulate a desynchronisation.
4380         // TODO(b/202710709): Use timestamp relative to last played video frame before pause
4381         mMediaCodecPlayer.setAudioTrackOffsetNs(-100L * 1000000);
4382         mMediaCodecPlayer.stopDrainingAudioOutputBuffers(false);
4383 
4384         // Wait until audio playback resumes
4385         AudioTimestamp postResumeAudioTimestamp;
4386         {
4387             AudioTimestamp previousAudioTimestamp;
4388             long startTimeMs = System.currentTimeMillis();
4389             do {
4390                 int audioResumeTimeoutMs = 1000;
4391                 assertTrue(String.format("Audio has not resumed after %d milliseconds",
4392                                 audioResumeTimeoutMs),
4393                         System.currentTimeMillis() - startTimeMs < audioResumeTimeoutMs);
4394                 previousAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4395                 Thread.sleep(50);
4396                 postResumeAudioTimestamp = mMediaCodecPlayer.getTimestamp();
4397             } while (postResumeAudioTimestamp.framePosition
4398                     == previousAudioTimestamp.framePosition);
4399         }
4400 
4401         // Now that audio playback has resumed, wait until video playback resumes
4402         {
4403             // We actually don't care about trying to capture the exact time video resumed, because
4404             // we can just look at the historical list of rendered video timestamps
4405             long postResumeVideoTimeUs;
4406             long previousVideoTimeUs;
4407             long startTimeMs = System.currentTimeMillis();
4408             do {
4409                 int videoResumeTimeoutMs = 2000;
4410                 assertTrue(String.format("Video has not resumed after %d milliseconds",
4411                                 videoResumeTimeoutMs),
4412                         System.currentTimeMillis() - startTimeMs < videoResumeTimeoutMs);
4413                 previousVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4414                 Thread.sleep(50);
4415                 postResumeVideoTimeUs = mMediaCodecPlayer.getVideoTimeUs();
4416             } while (postResumeVideoTimeUs == previousVideoTimeUs);
4417         }
4418 
4419         // The system time when rendering the first audio frame after the resume
4420         long playbackRateFps = mMediaCodecPlayer.getAudioTrack().getPlaybackRate();
4421         long playedFrames = postResumeAudioTimestamp.framePosition
4422                 - underrunAudioTimestamp.framePosition + 1;
4423         double elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
4424         long resumeAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime - (long) elapsedTimeNs;
4425         long resumeAudioSystemTimeMs = resumeAudioSystemTimeNs / 1000 / 1000;
4426 
4427         // The system time when rendering the first video frame after video playback resumes
4428         long resumeVideoSystemTimeMs = mMediaCodecPlayer.getRenderedVideoFrameSystemTimeList()
4429                 .get(pausedVideoRenderedTimestampIndex + 1) / 1000 / 1000;
4430 
4431         // Verify that video resumes in a reasonable amount of time after audio resumes
4432         // Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
4433         resumeAudioSystemTimeMs += 100;
4434         long resumeDeltaMs = resumeVideoSystemTimeMs - resumeAudioSystemTimeMs;
4435         assertTrue(String.format("Video started %s milliseconds before audio resumed "
4436                         + "(video:%d audio:%d)", resumeDeltaMs * -1, resumeVideoSystemTimeMs,
4437                         resumeAudioSystemTimeMs),
4438                 resumeDeltaMs > 0); // video is expected to start after audio resumes
4439         assertTrue(String.format(
4440                         "Video started %d milliseconds after audio resumed (video:%d audio:%d)",
4441                         resumeDeltaMs, resumeVideoSystemTimeMs, resumeAudioSystemTimeMs),
4442                 resumeDeltaMs <= 600); // video starting 300ms after audio is barely noticeable
4443 
4444         // Determine the system time of the audio frame that matches the presentation timestamp of
4445         // the resumed video frame
4446         long resumeVideoPresentationTimeUs = mMediaCodecPlayer.getRenderedVideoFrameTimestampList()
4447                 .get(pausedVideoRenderedTimestampIndex + 1);
4448         long matchingAudioFramePosition =
4449                 resumeVideoPresentationTimeUs * playbackRateFps / 1000 / 1000;
4450         playedFrames = matchingAudioFramePosition - postResumeAudioTimestamp.framePosition;
4451         elapsedTimeNs = playedFrames * (1000.0 * 1000.0 * 1000.0 / playbackRateFps);
4452         long matchingAudioSystemTimeNs = postResumeAudioTimestamp.nanoTime + (long) elapsedTimeNs;
4453         long matchingAudioSystemTimeMs = matchingAudioSystemTimeNs / 1000 / 1000;
4454 
4455         // Verify that video and audio are in sync at the time when video resumes
4456         // Note: Because a -100ms PTS gap is introduced, the video should resume 100ms later
4457         matchingAudioSystemTimeMs += 100;
4458         long avSyncOffsetMs =  resumeVideoSystemTimeMs - matchingAudioSystemTimeMs;
4459         assertTrue(String.format("Video is %d milliseconds out of sync of audio after resuming "
4460                         + "(video:%d, audio:%d)", avSyncOffsetMs, resumeVideoSystemTimeMs,
4461                         matchingAudioSystemTimeMs),
4462                 // some leniency in AV sync is required because Android TV STB/OTT OEMs often have
4463                 // to tune for imperfect downstream TVs (that have processing delays on the video)
4464                 // by knowingly producing HDMI output that has audio and video mildly out of sync
4465                 Math.abs(avSyncOffsetMs) <= 80);
4466     }
4467 
4468     /**
4469      * Test that audio underrun pauses video and resumes in-sync for HEVC in tunneled mode.
4470      */
4471     @Test
4472     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4473     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4474     public void testTunneledAudioUnderrunHevc() throws Exception {
4475         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_HEVC,
4476                 "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv");
4477     }
4478 
4479     /**
4480      * Test that audio underrun pauses video and resumes in-sync for AVC in tunneled mode.
4481      */
4482     @Test
4483     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4484     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4485     public void testTunneledAudioUnderrunAvc() throws Exception {
4486         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_AVC,
4487                 "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4");
4488     }
4489 
4490     /**
4491      * Test that audio underrun pauses video and resumes in-sync for VP9 in tunneled mode.
4492      */
4493     @Test
4494     @ApiTest(apis={"android.media.MediaCodecInfo.CodecCapabilities#FEATURE_TunneledPlayback"})
4495     @SdkSuppress(minSdkVersion = Build.VERSION_CODES.S)
4496     public void testTunneledAudioUnderrunVp9() throws Exception {
4497         tunneledAudioUnderrun(MediaFormat.MIMETYPE_VIDEO_VP9,
4498                 "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm");
4499     }
4500 
4501     private void sleepUntil(Supplier<Boolean> supplier, Duration maxWait) throws Exception {
4502         final long deadLineMs = System.currentTimeMillis() + maxWait.toMillis();
4503         do {
4504             Thread.sleep(50);
4505         } while (!supplier.get() && System.currentTimeMillis() < deadLineMs);
4506     }
4507 
4508     /**
4509      * Returns list of CodecCapabilities advertising support for the given MIME type.
4510      */
4511     private static List<CodecCapabilities> getCodecCapabilitiesForMimeType(String mimeType) {
4512         int numCodecs = MediaCodecList.getCodecCount();
4513         List<CodecCapabilities> caps = new ArrayList<CodecCapabilities>();
4514         for (int i = 0; i < numCodecs; i++) {
4515             MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
4516             if (codecInfo.isAlias()) {
4517                 continue;
4518             }
4519             if (codecInfo.isEncoder()) {
4520                 continue;
4521             }
4522 
4523             String[] types = codecInfo.getSupportedTypes();
4524             for (int j = 0; j < types.length; j++) {
4525                 if (types[j].equalsIgnoreCase(mimeType)) {
4526                     caps.add(codecInfo.getCapabilitiesForType(mimeType));
4527                 }
4528             }
4529         }
4530         return caps;
4531     }
4532 
4533     /**
4534      * Returns true if there exists a codec supporting the given MIME type that meets the
4535      * minimum specification for VR high performance requirements.
4536      *
4537      * The requirements are as follows:
4538      *   - At least 243000 blocks per second (where blocks are defined as 16x16 -- note this
4539      *   is equivalent to 1920x1080@30fps)
4540      *   - Feature adaptive-playback present
4541      */
4542     private static boolean doesMimeTypeHaveMinimumSpecVrReadyCodec(String mimeType) {
4543         List<CodecCapabilities> caps = getCodecCapabilitiesForMimeType(mimeType);
4544         for (CodecCapabilities c : caps) {
4545             if (!c.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback)) {
4546                 continue;
4547             }
4548 
4549             if (!c.getVideoCapabilities().areSizeAndRateSupported(1920, 1080, 30.0)) {
4550                 continue;
4551             }
4552 
4553             return true;
4554         }
4555 
4556         return false;
4557     }
4558 
4559     /**
4560      * Returns true if there exists a codec supporting the given MIME type that meets VR high
4561      * performance requirements.
4562      *
4563      * The requirements are as follows:
4564      *   - At least 972000 blocks per second (where blocks are defined as 16x16 -- note this
4565      *   is equivalent to 3840x2160@30fps)
4566      *   - At least 4 concurrent instances
4567      *   - Feature adaptive-playback present
4568      */
4569     private static boolean doesMimeTypeHaveVrReadyCodec(String mimeType) {
4570         List<CodecCapabilities> caps = getCodecCapabilitiesForMimeType(mimeType);
4571         for (CodecCapabilities c : caps) {
4572             if (c.getMaxSupportedInstances() < 4) {
4573                 continue;
4574             }
4575 
4576             if (!c.isFeatureSupported(CodecCapabilities.FEATURE_AdaptivePlayback)) {
4577                 continue;
4578             }
4579 
4580             if (!c.getVideoCapabilities().areSizeAndRateSupported(3840, 2160, 30.0)) {
4581                 continue;
4582             }
4583 
4584             return true;
4585         }
4586 
4587         return false;
4588     }
4589 
4590     @CddTest(requirements = {"7.9.2/C-1-11"})
4591     @Test
4592     public void testVrHighPerformanceH264() throws Exception {
4593         if (!supportsVrHighPerformance()) {
4594             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4595             return;
4596         }
4597 
4598         boolean h264IsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_AVC);
4599         assertTrue("Did not find a VR ready H.264 decoder", h264IsReady);
4600     }
4601 
4602     @CddTest(requirements = {"7.9.2/C-1-12"})
4603     @Test
4604     public void testVrHighPerformanceHEVC() throws Exception {
4605         if (!supportsVrHighPerformance()) {
4606             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4607             return;
4608         }
4609 
4610         // Test minimum mandatory requirements.
4611         assertTrue(doesMimeTypeHaveMinimumSpecVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_HEVC));
4612 
4613         boolean hevcIsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_HEVC);
4614         if (!hevcIsReady) {
4615             Log.d(TAG, "HEVC isn't required to be VR ready");
4616             return;
4617         }
4618     }
4619 
4620     @CddTest(requirements = {"7.9.2/C-1-12"})
4621     @Test
4622     public void testVrHighPerformanceVP9() throws Exception {
4623         if (!supportsVrHighPerformance()) {
4624             MediaUtils.skipTest(TAG, "FEATURE_VR_MODE_HIGH_PERFORMANCE not present");
4625             return;
4626         }
4627 
4628         // Test minimum mandatory requirements.
4629         assertTrue(doesMimeTypeHaveMinimumSpecVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_VP9));
4630 
4631         boolean vp9IsReady = doesMimeTypeHaveVrReadyCodec(MediaFormat.MIMETYPE_VIDEO_VP9);
4632         if (!vp9IsReady) {
4633             Log.d(TAG, "VP9 isn't required to be VR ready");
4634             return;
4635         }
4636     }
4637 
4638     private boolean supportsVrHighPerformance() {
4639         PackageManager pm = mContext.getPackageManager();
4640         return pm.hasSystemFeature(PackageManager.FEATURE_VR_MODE_HIGH_PERFORMANCE);
4641     }
4642 }
4643