/*
 * Copyright (C) 2022 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.decoder.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

import android.app.ActivityManager;
import android.media.AudioAttributes;
import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaExtractor;
import android.media.MediaFormat;
import android.media.cts.MediaHeavyPresubmitTest;
import android.media.cts.MediaTestBase;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Looper;
import android.os.SystemProperties;
import android.platform.test.annotations.AppModeFull;
import android.view.Surface;

import androidx.test.ext.junit.runners.AndroidJUnit4;
import androidx.test.filters.SdkSuppress;

import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.Preconditions;

import org.junit.After;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Supplier;

@MediaHeavyPresubmitTest
@AppModeFull(reason = "There should be no instant apps specific behavior related to decoders")
@SdkSuppress(minSdkVersion = Build.VERSION_CODES.UPSIDE_DOWN_CAKE, codeName = "UpsideDownCake")
@RunWith(AndroidJUnit4.class)
@ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
public class DecodeOnlyTest extends MediaTestBase {
    public static final boolean WAS_LAUNCHED_ON_U_OR_LATER =
            SystemProperties.getInt("ro.product.first_api_level",
                    Build.VERSION_CODES.CUR_DEVELOPMENT)
                    >= Build.VERSION_CODES.UPSIDE_DOWN_CAKE;

    private static final String MEDIA_DIR_STRING = WorkDir.getMediaDirString();
    private static final String HEVC_VIDEO =
            "video_1280x720_mkv_h265_500kbps_25fps_aac_stereo_128kbps_44100hz.mkv";
    private static final String AVC_VIDEO =
            "video_480x360_mp4_h264_1000kbps_25fps_aac_stereo_128kbps_44100hz.mp4";
    private static final String VP9_VIDEO =
            "bbb_s1_640x360_webm_vp9_0p21_1600kbps_30fps_vorbis_stereo_128kbps_48000hz.webm";
    private static final String MIME_VIDEO_PREFIX = "video/";
    private static final String MIME_AUDIO_PREFIX = "audio/";
    private static final long EOS_TIMESTAMP_TUNNEL_MODE = Long.MAX_VALUE;

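    // JNI library backing the nativeTestNonTunneledTrickPlay() method declared below.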
    static {
        System.loadLibrary("ctsmediadecodertest_jni");
    }

    @Before
    @Override
    public void setUp() throws Throwable {
        super.setUp();
    }

    @After
    @Override
    public void tearDown() {
        super.tearDown();
    }

    /**
     * When testing perfect seek, assert that the first frame rendered after seeking is the exact
     * frame we seeked to.
     */
    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOnAvc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(AVC_VIDEO, true);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOnVp9() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(VP9_VIDEO, true);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOnHevc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(HEVC_VIDEO, true);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOffAvc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(AVC_VIDEO, false);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOffVp9() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(VP9_VIDEO, false);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledPerfectSeekInitialPeekOffHevc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledPerfectSeek(HEVC_VIDEO, false);
    }

    /**
     * In trick play, we expect to receive/render the non-DECODE_ONLY frames only.
     */
    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledTrickPlayHevc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledTrickPlay(HEVC_VIDEO);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledTrickPlayAvc() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledTrickPlay(AVC_VIDEO);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testTunneledTrickPlayVp9() throws Exception {
        // Tunnel mode requires vendor support of the DECODE_ONLY feature
        Assume.assumeTrue("First API level is not Android 14 or later.",
                WAS_LAUNCHED_ON_U_OR_LATER);
        testTunneledTrickPlay(VP9_VIDEO);
    }

    private static native boolean nativeTestNonTunneledTrickPlay(String fileName, Surface surface,
            boolean isAsync);

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void nativeTestNonTunneledTrickPlayHevc() {
        boolean[] boolStates = {true, false};
        for (boolean isAsync : boolStates) {
            assertTrue(nativeTestNonTunneledTrickPlay(MEDIA_DIR_STRING + HEVC_VIDEO,
                    getActivity().getSurfaceHolder().getSurface(), isAsync));
        }
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testNonTunneledTrickPlayHevc() throws Exception {
        testNonTunneledTrickPlay(HEVC_VIDEO);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testNonTunneledTrickPlayAvc() throws Exception {
        testNonTunneledTrickPlay(AVC_VIDEO);
    }

    @Test
    @ApiTest(apis = {"android.media.MediaCodec#BUFFER_FLAG_DECODE_ONLY"})
    public void testNonTunneledTrickPlayVp9() throws Exception {
        testNonTunneledTrickPlay(VP9_VIDEO);
    }

    private void testNonTunneledTrickPlay(String fileName) throws Exception {
        Preconditions.assertTestFileExists(MEDIA_DIR_STRING + fileName);
        // create the video extractor
        MediaExtractor videoExtractor = createMediaExtractor(fileName);

        // choose the first track that has the prefix "video/" and select it
        int videoTrackIndex = getFirstTrackWithMimePrefix(MIME_VIDEO_PREFIX, videoExtractor);
        videoExtractor.selectTrack(videoTrackIndex);

        // create the video codec
        MediaFormat videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
        String mime = videoFormat.getString(MediaFormat.KEY_MIME);
        MediaCodec videoCodec = MediaCodec.createDecoderByType(mime);

        AtomicBoolean done = new AtomicBoolean(false);
        List<Long> expectedPresentationTimes = new ArrayList<>();
        List<Long> receivedPresentationTimes = new ArrayList<>();

        // set a callback on the video codec to process the frames
        videoCodec.setCallback(new MediaCodec.Callback() {
            private boolean mEosQueued;
            int mDecodeOnlyCounter = 0;

            // Before queueing a frame, check whether it is the last frame and, if so, set the
            // EOS flag on it. If the frame is to be decoded only (every other frame), set the
            // DECODE_ONLY flag on it instead.
            // Only frames tagged with neither EOS nor DECODE_ONLY are expected to be rendered
            // and added to expectedPresentationTimes.
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                if (!mEosQueued) {
                    ByteBuffer inputBuffer = videoCodec.getInputBuffer(index);
                    int sampleSize = videoExtractor.readSampleData(inputBuffer, 0);
                    long presentationTime = videoExtractor.getSampleTime();
                    int flags = 0;
                    if (sampleSize < 0) {
                        flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                        sampleSize = 0;
                        mEosQueued = true;
                    } else if (mDecodeOnlyCounter % 2 == 0) {
                        flags = MediaCodec.BUFFER_FLAG_DECODE_ONLY;
                    } else {
                        expectedPresentationTimes.add(presentationTime);
                    }
                    mDecodeOnlyCounter++;
                    videoCodec.queueInputBuffer(index, 0, sampleSize, presentationTime, flags);
                    videoExtractor.advance();
                }
            }

            // The frames received here are the frames that are rendered, not the DECODE_ONLY
            // ones. If the DECODE_ONLY flag behaves correctly, receivedPresentationTimes should
            // exactly match expectedPresentationTimes.
            // When the codec receives the EOS frame, set done to true, which exits the loop
            // below, signaling that the codec has finished processing the video and that the
            // test should now assert on the contents of the lists.
            @Override
            public void onOutputBufferAvailable(MediaCodec codec, int index,
                    MediaCodec.BufferInfo info) {
                videoCodec.releaseOutputBuffer(index, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    done.set(true);
                } else {
                    receivedPresentationTimes.add(info.presentationTimeUs);
                }
            }

            @Override
            public void onError(MediaCodec codec, MediaCodec.CodecException e) {
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
            }
        });

        videoCodec.configure(videoFormat, getActivity().getSurfaceHolder().getSurface(), null, 0);
        // start the video codec with the selected track
        videoCodec.start();

        // keep looping until the codec receives the EOS frame
        while (!done.get()) {
            Thread.sleep(100);
        }

        videoCodec.stop();
        videoCodec.release();

        Collections.sort(expectedPresentationTimes);
        assertEquals(expectedPresentationTimes, receivedPresentationTimes);
    }

    private void testTunneledTrickPlay(String fileName) throws Exception {
        Preconditions.assertTestFileExists(MEDIA_DIR_STRING + fileName);

        // generate the audio session id needed for tunnel mode playback
        AudioManager audioManager = mContext.getSystemService(AudioManager.class);
        int audioSessionId = audioManager.generateAudioSessionId();

        // create the video extractor
        MediaExtractor videoExtractor = createMediaExtractor(fileName);

        // choose the first track that has the prefix "video/" and select it
        int videoTrackIndex = getFirstTrackWithMimePrefix(MIME_VIDEO_PREFIX, videoExtractor);
        videoExtractor.selectTrack(videoTrackIndex);

        // create the video codec for tunneled play
        MediaFormat videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
        videoFormat.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback,
                true);
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
        String codecName = mcl.findDecoderForFormat(videoFormat);
        Assume.assumeTrue("Codec is not supported on this device",
                codecName != null);
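        // Tie the video decoder to the audio session used by the AudioTrack so tunneled
        // playback stays A/V-synced.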
        videoFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, audioSessionId);
        MediaCodec videoCodec = MediaCodec.createByCodecName(codecName);

        // create the audio extractor
        MediaExtractor audioExtractor = createMediaExtractor(fileName);

        // choose the first track that has the prefix "audio/" and select it
        int audioTrackIndex = getFirstTrackWithMimePrefix(MIME_AUDIO_PREFIX, audioExtractor);
        audioExtractor.selectTrack(audioTrackIndex);

        // create the audio codec
        MediaFormat audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);
        String mime = audioFormat.getString(MediaFormat.KEY_MIME);
        MediaCodec audioCodec = MediaCodec.createDecoderByType(mime);

        // audio track used by the audio codec
        AudioTrack audioTrack = createAudioTrack(audioFormat, audioSessionId);

        List<Long> expectedPresentationTimes = new ArrayList<>();

        videoCodec.setCallback(new MediaCodec.Callback() {
            int mDecodeOnlyCounter = 0;

            // Before queueing a frame, check whether it is the last frame and, if so, set the
            // EOS flag on it. If the frame is to be decoded only (every other frame), set the
            // DECODE_ONLY flag on it instead. Only frames tagged with neither EOS nor
            // DECODE_ONLY are expected to be rendered and added to expectedPresentationTimes.
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                ByteBuffer inputBuffer = videoCodec.getInputBuffer(index);
                int sampleSize = videoExtractor.readSampleData(inputBuffer, 0);
                long presentationTime = videoExtractor.getSampleTime();
                int flags = 0;
                if (sampleSize < 0) {
                    flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                    sampleSize = 0;
                } else if (mDecodeOnlyCounter % 2 == 0) {
                    flags = MediaCodec.BUFFER_FLAG_DECODE_ONLY;
                } else {
                    expectedPresentationTimes.add(presentationTime);
                }
                mDecodeOnlyCounter++;
                videoCodec.queueInputBuffer(index, 0, sampleSize, presentationTime, flags);
                videoExtractor.advance();
            }

            // Nothing to do here: in tunneled mode the frames are rendered directly by the
            // hardware and are not returned to the app for further processing.
            @Override
            public void onOutputBufferAvailable(MediaCodec codec, int index,
                    MediaCodec.BufferInfo info) {
                Assert.fail("onOutputBufferAvailable should not be called in tunnel mode.");
            }

            @Override
            public void onError(MediaCodec codec, MediaCodec.CodecException e) {
                Assert.fail("Encountered unexpected error while decoding video: "
                        + e.getMessage());
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
            }
        });
        videoCodec.configure(videoFormat, getActivity().getSurfaceHolder().getSurface(), null, 0);
        AtomicBoolean done = new AtomicBoolean(false);
        // Since data is written to the AudioTrack in a blocking manner, run it on a separate
        // thread so it does not block other operations.
        HandlerThread audioThread = new HandlerThread("audioThread");
        audioThread.start();
        audioCodec.setCallback(new AudioCallback(audioCodec, audioExtractor, audioTrack, done),
                new Handler(audioThread.getLooper()));
        audioCodec.configure(audioFormat, null, null, 0);

        List<Long> renderedPresentationTimes = new ArrayList<>();

        // A listener on rendered frames: if it is the last frame (EOS), set the boolean to true
        // and exit the loop below; otherwise add the frame to renderedPresentationTimes.
        // renderedPresentationTimes and expectedPresentationTimes should be equal at the end of
        // the test.
        videoCodec.setOnFrameRenderedListener((codec, presentationTimeUs, nanoTime) -> {
            if (presentationTimeUs == EOS_TIMESTAMP_TUNNEL_MODE) {
                done.set(true);
            } else {
                renderedPresentationTimes.add(presentationTimeUs);
            }
        }, new Handler(Looper.getMainLooper()));

        // start media playback
        videoCodec.start();
        audioCodec.start();
        audioTrack.play();

        // keep looping until the codec receives the EOS frame
        while (!done.get()) {
            Thread.sleep(100);
        }
        audioTrack.stop();
        audioTrack.release();
        videoCodec.stop();
        videoCodec.release();
        audioCodec.stop();
        audioCodec.release();

        Collections.sort(expectedPresentationTimes);
        Collections.sort(renderedPresentationTimes);
        assertEquals(expectedPresentationTimes, renderedPresentationTimes);
    }

    // Polls the supplier every 50ms until it returns true or maxWait elapses.
    private void sleepUntil(Supplier<Boolean> supplier, Duration maxWait) throws Exception {
        final long deadLineMs = System.currentTimeMillis() + maxWait.toMillis();
        do {
            Thread.sleep(50);
        } while (!supplier.get() && System.currentTimeMillis() < deadLineMs);
    }

    private void testTunneledPerfectSeek(String fileName,
            final boolean initialPeek) throws Exception {
        Preconditions.assertTestFileExists(MEDIA_DIR_STRING + fileName);

        // generate the audio session id needed for tunnel mode playback
        AudioManager audioManager = mContext.getSystemService(AudioManager.class);
        int audioSessionId = audioManager.generateAudioSessionId();

        // create the video extractor
        MediaExtractor videoExtractor = createMediaExtractor(fileName);

        // choose the first track that has the prefix "video/" and select it
        int videoTrackIndex = getFirstTrackWithMimePrefix(MIME_VIDEO_PREFIX, videoExtractor);
        videoExtractor.selectTrack(videoTrackIndex);

        // create the video codec for tunneled play
        MediaFormat videoFormat = videoExtractor.getTrackFormat(videoTrackIndex);
        videoFormat.setFeatureEnabled(MediaCodecInfo.CodecCapabilities.FEATURE_TunneledPlayback,
                true);
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.ALL_CODECS);
        String codecName = mcl.findDecoderForFormat(videoFormat);
        Assume.assumeTrue("Codec is not supported on this device",
                codecName != null);
        videoFormat.setInteger(MediaFormat.KEY_AUDIO_SESSION_ID, audioSessionId);
        MediaCodec videoCodec = MediaCodec.createByCodecName(codecName);

        // create the audio extractor
        MediaExtractor audioExtractor = createMediaExtractor(fileName);

        // choose the first track that has the prefix "audio/" and select it
        int audioTrackIndex = getFirstTrackWithMimePrefix(MIME_AUDIO_PREFIX, audioExtractor);
        audioExtractor.selectTrack(audioTrackIndex);

        // create the audio codec
        MediaFormat audioFormat = audioExtractor.getTrackFormat(audioTrackIndex);
        String mime = audioFormat.getString(MediaFormat.KEY_MIME);
        MediaCodec audioCodec = MediaCodec.createDecoderByType(mime);

        // audio track used by the audio codec
        AudioTrack audioTrack = createAudioTrack(audioFormat, audioSessionId);

        // Frames at 2s of each file are not key frames
        AtomicLong seekTime = new AtomicLong(2000 * 1000);
        videoExtractor.seekTo(seekTime.get(), MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        long audioSeekTime = videoExtractor.getSampleTime();
        audioExtractor.seekTo(audioSeekTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);

        List<Long> expectedPresentationTimes = new ArrayList<>();
        AtomicBoolean done = new AtomicBoolean(false);
        AtomicBoolean hasDecodeOnlyFrames = new AtomicBoolean(false);
        videoCodec.setCallback(new MediaCodec.Callback() {
            @Override
            public void onInputBufferAvailable(MediaCodec codec, int index) {
                if (!done.get()) {
                    ByteBuffer inputBuffer = codec.getInputBuffer(index);
                    int sampleSize = videoExtractor.readSampleData(inputBuffer, 0);
                    long presentationTime = videoExtractor.getSampleTime();
                    int flags = 0;
                    if (sampleSize < 0) {
                        flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                        sampleSize = 0;
                    } else if (presentationTime < seekTime.get()) {
                        flags = MediaCodec.BUFFER_FLAG_DECODE_ONLY;
                        hasDecodeOnlyFrames.set(true);
                    } else {
                        expectedPresentationTimes.add(presentationTime);
                    }
                    codec.queueInputBuffer(index, 0, sampleSize, presentationTime, flags);
                    videoExtractor.advance();
                }
            }

            @Override
            public void onOutputBufferAvailable(MediaCodec codec, int index,
                    MediaCodec.BufferInfo info) {
                Assert.fail("onOutputBufferAvailable should not be called in tunnel mode.");
            }

            @Override
            public void onError(MediaCodec codec, MediaCodec.CodecException e) {
                Assert.fail("Encountered unexpected error while decoding video: "
                        + e.getMessage());
            }

            @Override
            public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
            }
        });
        videoCodec.configure(videoFormat, getActivity().getSurfaceHolder().getSurface(), null, 0);
        // Since data is written to the AudioTrack in a blocking manner, run it on a separate
        // thread so it does not block other operations.
        HandlerThread audioThread = new HandlerThread("audioThread");
        audioThread.start();
        audioCodec.setCallback(new AudioCallback(audioCodec, audioExtractor, audioTrack, done),
                new Handler(audioThread.getLooper()));
        audioCodec.configure(audioFormat, null, null, 0);

        List<Long> renderedPresentationTimes = new ArrayList<>();
        // play for 500ms
        videoCodec.setOnFrameRenderedListener((codec, presentationTimeUs, nanoTime) -> {
            renderedPresentationTimes.add(presentationTimeUs);
            if (presentationTimeUs >= seekTime.get() + 500 * 1000) {
                done.set(true);
            }
        }, new Handler(Looper.getMainLooper()));

        AtomicBoolean firstTunnelFrameReady = new AtomicBoolean(false);
        videoCodec.setOnFirstTunnelFrameReadyListener(new Handler(Looper.getMainLooper()),
                (codec) -> {
                    firstTunnelFrameReady.set(true);
                });

        boolean isPeeking = setKeyTunnelPeek(videoCodec, initialPeek ? 1 : 0);
        // start media playback
        videoCodec.start();
        audioCodec.start();

        // When video codecs are started, large chunks of contiguous physical memory need to be
        // allocated, which, on low-RAM devices, can trigger high CPU usage for moving memory
        // around to create contiguous space for the video decoder. This can cause an increase in
        // startup time for playback.
        ActivityManager activityManager = mContext.getSystemService(ActivityManager.class);
        final int firstFrameReadyTimeoutSeconds = activityManager.isLowRamDevice() ? 3 : 1;
        sleepUntil(firstTunnelFrameReady::get, Duration.ofSeconds(firstFrameReadyTimeoutSeconds));
        assertTrue(String.format("onFirstTunnelFrameReady not called within %d seconds",
                firstFrameReadyTimeoutSeconds), firstTunnelFrameReady.get());

        final int waitForRenderingMs = 1000;
        // Sleep for 1s here to ensure that either (1) when peek is on, high-latency display
        // pipelines have enough time to render the first frame, or (2) when peek is off, the
        // frame is not rendered even after a long time.
        Thread.sleep(waitForRenderingMs);
        if (isPeeking) {
            assertEquals(1, renderedPresentationTimes.size());
            assertEquals(seekTime.get(), (long) renderedPresentationTimes.get(0));
        } else {
            assertTrue(renderedPresentationTimes.isEmpty());
        }

        assertTrue("No DECODE_ONLY frames have been produced, "
                + "try changing the offset for the seek. To do this, find a timestamp "
                + "that falls between two sync frames to ensure that there will "
                + "be a few DECODE_ONLY frames. For example \"ffprobe -show_frames $video\""
                + " can be used to list all the frames of a certain video and will show"
                + " info about key frames and their timestamps.",
                hasDecodeOnlyFrames.get());

        // Run the playback to verify that the frame at the seek time is also rendered when peek
        // is off.
        audioTrack.play();
        while (!done.get()) {
            Thread.sleep(100);
        }
        if (!isPeeking) {
            assertFalse(renderedPresentationTimes.isEmpty());
            assertEquals(seekTime.get(), (long) renderedPresentationTimes.get(0));
        }

        audioTrack.pause();
        // Just to be safe, since pause may take some time.
        Thread.sleep(500);
        videoCodec.flush();
        audioCodec.flush();
        audioTrack.flush();

        // Frames at 7s of each file are not key frames, and there is a non-zero key frame
        // before that point.
        seekTime.set(7000 * 1000);
        videoExtractor.seekTo(seekTime.get(), MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        audioSeekTime = videoExtractor.getSampleTime();
        audioExtractor.seekTo(audioSeekTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
        expectedPresentationTimes.clear();
        renderedPresentationTimes.clear();

        // A listener on rendered frames: if it is the last frame (EOS), set the boolean to true
        // and exit the loop below; otherwise add the frame to renderedPresentationTimes.
        // renderedPresentationTimes and expectedPresentationTimes should be equal at the end of
        // the test, and renderedPresentationTimes should only contain frames starting at the
        // desired seek timestamp.
        videoCodec.setOnFrameRenderedListener((codec, presentationTimeUs, nanoTime) -> {
            if (presentationTimeUs == EOS_TIMESTAMP_TUNNEL_MODE) {
                done.set(true);
            } else {
                renderedPresentationTimes.add(presentationTimeUs);
            }
        }, new Handler(Looper.getMainLooper()));

        // Restart media playback
        firstTunnelFrameReady.set(false);
        isPeeking = setKeyTunnelPeek(videoCodec, isPeeking ? 0 : 1);
        done.set(false);
        videoCodec.start();
        audioCodec.start();
        sleepUntil(firstTunnelFrameReady::get, Duration.ofSeconds(firstFrameReadyTimeoutSeconds));
        assertTrue(String.format("onFirstTunnelFrameReady not called within %d seconds",
                firstFrameReadyTimeoutSeconds), firstTunnelFrameReady.get());

        // Sleep for 1s here to ensure that either (1) when peek is on, high-latency display
        // pipelines have enough time to render the first frame, or (2) when peek is off, the
        // frame is not rendered even after a long time.
        Thread.sleep(waitForRenderingMs);
        if (isPeeking) {
            assertEquals(1, renderedPresentationTimes.size());
            assertEquals(seekTime.get(), (long) renderedPresentationTimes.get(0));
        } else {
            assertTrue(renderedPresentationTimes.isEmpty());
        }

        if (!isPeeking) {
            // The first frame should be rendered immediately after setting peek on.
            isPeeking = setKeyTunnelPeek(videoCodec, 1);
            // This is long due to high-latency display pipelines on TV devices.
            Thread.sleep(waitForRenderingMs);
            assertEquals(1, renderedPresentationTimes.size());
            assertEquals(seekTime.get(), (long) renderedPresentationTimes.get(0));
        }

        audioTrack.play();
        // keep looping until the codec receives the EOS frame
        while (!done.get()) {
            Thread.sleep(100);
        }

        audioTrack.stop();
        audioTrack.release();
        videoCodec.stop();
        videoCodec.release();
        audioCodec.stop();
        audioCodec.release();
        Collections.sort(expectedPresentationTimes);
        Collections.sort(renderedPresentationTimes);
        assertEquals(expectedPresentationTimes, renderedPresentationTimes);
        assertEquals(seekTime.get(), (long) renderedPresentationTimes.get(0));
    }

    // 1 is on, 0 is off.
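    // Sets PARAMETER_KEY_TUNNEL_PEEK on the codec and returns whether peek is now enabled.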
    private boolean setKeyTunnelPeek(MediaCodec videoCodec, int value) {
        Bundle parameters = new Bundle();
        parameters.putInt(MediaCodec.PARAMETER_KEY_TUNNEL_PEEK, value);
        videoCodec.setParameters(parameters);
        return value != 0;
    }

    // Creates a HW A/V sync AudioTrack on the given audio session, as required for tunneled
    // playback.
    private AudioTrack createAudioTrack(MediaFormat audioFormat, int audioSessionId) {
        int sampleRate = audioFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
        int channelCount = audioFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
        int channelConfig;

        switch (channelCount) {
            case 1:
                channelConfig = AudioFormat.CHANNEL_OUT_MONO;
                break;
            case 2:
                channelConfig = AudioFormat.CHANNEL_OUT_STEREO;
                break;
            case 6:
                channelConfig = AudioFormat.CHANNEL_OUT_5POINT1;
                break;
            default:
                throw new IllegalArgumentException();
        }

        int minBufferSize =
                AudioTrack.getMinBufferSize(
                        sampleRate,
                        channelConfig,
                        AudioFormat.ENCODING_PCM_16BIT);
        AudioAttributes audioAttributes = (new AudioAttributes.Builder())
                .setLegacyStreamType(AudioManager.STREAM_MUSIC)
                .setFlags(AudioAttributes.FLAG_HW_AV_SYNC)
                .build();
        AudioFormat af = (new AudioFormat.Builder())
                .setChannelMask(channelConfig)
                .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
                .setSampleRate(sampleRate)
                .build();
        return new AudioTrack(audioAttributes, af, 2 * minBufferSize,
                AudioTrack.MODE_STREAM, audioSessionId);
    }

    // Returns the index of the first track whose MIME type starts with the given prefix.
    private int getFirstTrackWithMimePrefix(String prefix, MediaExtractor videoExtractor) {
        int trackIndex = -1;
        for (int i = 0; i < videoExtractor.getTrackCount(); ++i) {
            MediaFormat format = videoExtractor.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith(prefix)) {
                trackIndex = i;
                break;
            }
        }
        assertTrue("Video track was not found.", trackIndex >= 0);
        return trackIndex;
    }

    // Creates a MediaExtractor for the given file in the test media directory.
    private MediaExtractor createMediaExtractor(String fileName) throws IOException {
        MediaExtractor mediaExtractor = new MediaExtractor();
        mediaExtractor.setDataSource(MEDIA_DIR_STRING + fileName);
        return mediaExtractor;
    }

    // Decodes the audio track and writes the decoded PCM to the AudioTrack.
    private static class AudioCallback extends MediaCodec.Callback {
        private final MediaCodec mAudioCodec;
        private final MediaExtractor mAudioExtractor;
        private final AudioTrack mAudioTrack;
        private final AtomicBoolean mDone;

        AudioCallback(MediaCodec audioCodec, MediaExtractor audioExtractor,
                AudioTrack audioTrack, AtomicBoolean done) {
            this.mAudioCodec = audioCodec;
            this.mAudioExtractor = audioExtractor;
            this.mAudioTrack = audioTrack;
            this.mDone = done;
        }

        @Override
        public void onInputBufferAvailable(MediaCodec codec, int index) {
            ByteBuffer audioInputBuffer = mAudioCodec.getInputBuffer(index);
            int audioSampleSize = mAudioExtractor.readSampleData(audioInputBuffer, 0);
            long presentationTime = mAudioExtractor.getSampleTime();
            int flags = 0;
            if (audioSampleSize < 0) {
                flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                audioSampleSize = 0;
                presentationTime = 0;
            }
            mAudioCodec.queueInputBuffer(index, 0, audioSampleSize, presentationTime, flags);
            mAudioExtractor.advance();
        }

        @Override
        public void onOutputBufferAvailable(MediaCodec codec, int index,
                MediaCodec.BufferInfo info) {
            // TODO(b/291959069): Remove this once stale callbacks aren't fired by MediaCodec
            if (mDone.get()) {
                return;
            }
            ByteBuffer outputBuffer = mAudioCodec.getOutputBuffer(index);
            byte[] audioArray = new byte[info.size];
            outputBuffer.get(audioArray);
            outputBuffer.clear();
            long presentationTimeUs = info.presentationTimeUs;
            mAudioCodec.releaseOutputBuffer(index, false);

            ByteBuffer audioData = ByteBuffer.wrap(audioArray);
            while (audioData.remaining() > 0 && !mDone.get()) {
                int written = mAudioTrack.write(audioData, audioData.remaining(),
                        AudioTrack.WRITE_BLOCKING, presentationTimeUs * 1000);
                if (written >= 0) {
                    // When the audio track is not in the playing state, the write operation does
                    // not block in WRITE_BLOCKING mode, and when the audio track is full, the
                    // audio data cannot be fully written. Sleep here to wait for free space.
                    try {
                        Thread.sleep(50);
                    } catch (InterruptedException ignored) {
                    }
                } else {
                    Assert.fail("AudioTrack write failure.");
                }
            }
        }

        @Override
        public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        }

        @Override
        public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        }
    }
}