1 /* 2 * Copyright (C) 2013 The Android Open Source Project 3 * 4 * Licensed under the Apache License, Version 2.0 (the "License"); 5 * you may not use this file except in compliance with the License. 6 * You may obtain a copy of the License at 7 * 8 * http://www.apache.org/licenses/LICENSE-2.0 9 * 10 * Unless required by applicable law or agreed to in writing, software 11 * distributed under the License is distributed on an "AS IS" BASIS, 12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 * See the License for the specific language governing permissions and 14 * limitations under the License. 15 */ 16 17 package android.media.muxer.cts; 18 19 import static com.android.media.editing.flags.Flags.FLAG_MUXER_MP4_ENABLE_APV; 20 import static com.android.media.extractor.flags.Flags.FLAG_EXTRACTOR_MP4_ENABLE_APV; 21 22 import static org.junit.Assert.assertArrayEquals; 23 import static org.junit.Assert.assertEquals; 24 import static org.junit.Assert.assertNotNull; 25 import static org.junit.Assert.assertNull; 26 import static org.junit.Assert.assertTrue; 27 import static org.junit.Assert.fail; 28 29 import android.content.Context; 30 import android.content.res.AssetFileDescriptor; 31 import android.media.MediaCodec; 32 import android.media.MediaCodec.BufferInfo; 33 import android.media.MediaExtractor; 34 import android.media.MediaFormat; 35 import android.media.MediaMetadataRetriever; 36 import android.media.MediaMuxer; 37 import android.os.Build; 38 import android.os.ParcelFileDescriptor; 39 import android.platform.test.annotations.AppModeFull; 40 import android.platform.test.annotations.RequiresFlagsEnabled; 41 import android.platform.test.flag.junit.CheckFlagsRule; 42 import android.platform.test.flag.junit.DeviceFlagsValueProvider; 43 import android.util.Log; 44 45 import androidx.test.ext.junit.runners.AndroidJUnit4; 46 import androidx.test.platform.app.InstrumentationRegistry; 47 48 import com.android.compatibility.common.util.MediaUtils; 49 import com.android.compatibility.common.util.Preconditions; 50 51 import com.google.android.exoplayer2.Format; 52 import com.google.android.exoplayer2.MediaItem; 53 import com.google.android.exoplayer2.MetadataRetriever; 54 import com.google.android.exoplayer2.source.TrackGroupArray; 55 import com.google.android.exoplayer2.util.Util; 56 import com.google.android.exoplayer2.video.ColorInfo; 57 58 import org.junit.Rule; 59 import org.junit.Test; 60 import org.junit.runner.RunWith; 61 62 import java.io.File; 63 import java.io.FileNotFoundException; 64 import java.io.IOException; 65 import java.nio.ByteBuffer; 66 import java.util.HashMap; 67 import java.util.HashSet; 68 import java.util.Vector; 69 import java.util.concurrent.ExecutionException; 70 import java.util.function.Function; 71 import java.util.stream.IntStream; 72 73 // runs in both CTS and MCTS 74 @AppModeFull(reason = "No interaction with system server") 75 @RunWith(AndroidJUnit4.class) 76 public class MediaMuxerTest { 77 private static final String TAG = "MediaMuxerTest"; 78 private static final boolean VERBOSE = false; 79 private static final int MAX_SAMPLE_SIZE = 1024 * 1024; 80 private static final float LATITUDE = 0.0000f; 81 private static final float LONGITUDE = -180.0f; 82 private static final float BAD_LATITUDE = 91.0f; 83 private static final float BAD_LONGITUDE = -181.0f; 84 private static final float TOLERANCE = 0.0002f; 85 private static final long OFFSET_TIME_US = 29 * 60 * 1000000L; // 29 minutes 86 private static final String MEDIA_DIR = 
WorkDir.getMediaDirString(); 87 88 private final boolean mAndroid11 = Build.VERSION.SDK_INT >= Build.VERSION_CODES.R; 89 90 @Rule 91 public final CheckFlagsRule mCheckFlagsRule = DeviceFlagsValueProvider.createCheckFlagsRule(); 92 getContext()93 private Context getContext() { 94 return InstrumentationRegistry.getInstrumentation().getContext(); 95 } 96 getAssetFileDescriptorFor(final String res)97 private AssetFileDescriptor getAssetFileDescriptorFor(final String res) 98 throws FileNotFoundException { 99 Preconditions.assertTestFileExists(MEDIA_DIR + res); 100 File inpFile = new File(MEDIA_DIR + res); 101 ParcelFileDescriptor parcelFD = 102 ParcelFileDescriptor.open(inpFile, ParcelFileDescriptor.MODE_READ_ONLY); 103 return new AssetFileDescriptor(parcelFD, 0, parcelFD.getStatSize()); 104 } 105 106 @Test testWebmOutput()107 public void testWebmOutput() throws Exception { 108 final String source = 109 "video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm"; 110 String outputFilePath = File.createTempFile("testWebmOutput", ".webm") 111 .getAbsolutePath(); 112 cloneAndVerify( 113 source, 114 outputFilePath, 115 2, 116 90, 117 MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM, 118 false /* signalEos */); 119 } 120 121 @Test testHEICOutputInHEIF()122 public void testHEICOutputInHEIF() throws Exception { 123 final String source = "sample_heif.heic"; 124 String outputFilePath = File.createTempFile("testHEICOutputInHEIF", ".heic") 125 .getAbsolutePath(); 126 cloneAndVerify( 127 source, 128 outputFilePath, 129 4 /* expectedTrackCount */, 130 0 /* degrees */, 131 MediaMuxer.OutputFormat.MUXER_OUTPUT_HEIF, 132 false /* signalEos */); 133 } 134 135 @Test testAVIFOutputInHEIF()136 public void testAVIFOutputInHEIF() throws Exception { 137 final String source = "sample_avif.avif"; 138 String outputFilePath = File.createTempFile("testAVIFOutputInHEIF", ".avif") 139 .getAbsolutePath(); 140 cloneAndVerify( 141 source, 142 outputFilePath, 143 1 /* expectedTrackCount */, 144 0 /* degrees */, 145 MediaMuxer.OutputFormat.MUXER_OUTPUT_HEIF, 146 false /* signalEos */); 147 } 148 149 @Test testAV1OutputInMP4()150 public void testAV1OutputInMP4() throws Exception { 151 final String source = 152 "video_1280x720_mp4_av1_2000kbps_30fps_aac_stereo_128kbps_44100hz.mp4"; 153 String outputFilePath = File.createTempFile("testAV1OutputInMP4", ".mp4").getAbsolutePath(); 154 cloneAndVerify( 155 source, 156 outputFilePath, 157 2 /* expectedTrackCount */, 158 0 /* degrees */, 159 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, 160 true /* signalEos */); 161 } 162 163 @Test testAV1VideoOnlyOutputInMP4()164 public void testAV1VideoOnlyOutputInMP4() throws Exception { 165 final String source = "video_1280x720_mp4_av1_2000kbps_30fps.mp4"; 166 String outputFilePath = 167 File.createTempFile("testAV1VideoOnlyOutputInMP4", ".mp4").getAbsolutePath(); 168 cloneAndVerify( 169 source, 170 outputFilePath, 171 1 /* expectedTrackCount */, 172 0 /* degrees */, 173 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, 174 true /* signalEos */); 175 } 176 177 @Test 178 @RequiresFlagsEnabled({FLAG_EXTRACTOR_MP4_ENABLE_APV, FLAG_MUXER_MP4_ENABLE_APV}) testAPVVideoOnlyOutputInMP4()179 public void testAPVVideoOnlyOutputInMP4() throws Exception { 180 final String source = "pattern_640x480_30fps_8213kbps_apv_10bit.mp4"; 181 String outputFilePath = 182 File.createTempFile("testAPVVideoOnlyOutputInMP4", ".mp4").getAbsolutePath(); 183 cloneAndVerify( 184 source, 185 outputFilePath, 186 1 /* expectedTrackCount */, 187 0 /* degrees */, 188 
                MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
                true /* signalEos */);
    }

    /**
     * Test: make sure the muxer handles a Dolby Vision profile 8.4 video-track-only file
     * correctly.
     */
    @Test
    public void testDolbyVisionVideoOnlyP8() throws Exception {
        final String source = "video_dovi_1920x1080_60fps_dvhe_08_04.mp4";
        String outputFilePath = File.createTempFile("MediaMuxerTest_dolbyvisionP8videoOnly", ".mp4")
                .getAbsolutePath();
        try {
            cloneAndVerify(
                    source,
                    outputFilePath,
                    2 /* expectedTrackCount */,
                    180 /* degrees */,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
                    false /* signalEos */,
                    MediaMuxerTest::filterOutNonDolbyVisionFormat);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Test: make sure the muxer handles a Dolby Vision profile 9.2 video-track-only file
     * correctly.
     */
    @Test
    public void testDolbyVisionVideoOnlyP9() throws Exception {
        final String source = "video_dovi_1920x1080_60fps_dvav_09_02.mp4";
        String outputFilePath = File.createTempFile("MediaMuxerTest_dolbyvisionP9videoOnly", ".mp4")
                .getAbsolutePath();
        try {
            cloneAndVerify(
                    source,
                    outputFilePath,
                    2 /* expectedTrackCount */,
                    180 /* degrees */,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4,
                    false /* signalEos */,
                    MediaMuxerTest::filterOutNonDolbyVisionFormat);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    private static MediaFormat filterOutNonDolbyVisionFormat(MediaFormat format) {
        String mime = format.getString(MediaFormat.KEY_MIME);
        return mime.equals(MediaFormat.MIMETYPE_VIDEO_DOLBY_VISION) ? format : null;
    }

    /**
     * Test: makes sure audio and video muxing using MPEG4Writer works well when there are frame
     * drops, as in b/63590381 and b/64949961, while B-frame encoding is enabled.
     */
    @Test
    public void testSimulateAudioBVideoFramesDropIssues() throws Exception {
        final String source = "video_h264_main_b_frames.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testSimulateAudioBVideoFramesDropIssues", ".mp4").getAbsolutePath();
        try {
            simulateVideoFramesDropIssuesAndMux(source, outputFilePath, 2 /* expectedTrackCount */,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
            verifyAFewSamplesTimestamp(source, outputFilePath);
            verifySamplesMatch(source, outputFilePath, 66667 /* sample around 0 sec */, 0);
            verifySamplesMatch(
                    source, outputFilePath, 8033333 /* sample around 8 sec */, OFFSET_TIME_US);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Test: makes sure muxing works well when video with B frames is muxed using MPEG4Writer
     * and a few frames are dropped.
     */
    @Test
    public void testTimestampsBVideoOnlyFramesDropOnce() throws Exception {
        final String source = "video_480x360_mp4_h264_bframes_495kbps_30fps_editlist.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testTimestampsBVideoOnlyFramesDropOnce", ".mp4").getAbsolutePath();
        try {
            HashSet<Integer> samplesDropSet = new HashSet<Integer>();
            // Drop frames from sample index 56 to 76, I frame at 56.
            IntStream.rangeClosed(56, 76).forEach(samplesDropSet::add);
            // No start offsets for any track.
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, samplesDropSet, null);
            verifyTSWithSamplesDropAndStartOffset(
                    source, true /* has B frames */, outputFilePath, samplesDropSet, null);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Test: makes sure video-only muxing with B frames using MPEG4Writer works when frame
     * drops occur twice.
     */
    @Test
    public void testTimestampsBVideoOnlyFramesDropTwice() throws Exception {
        final String source = "video_480x360_mp4_h264_bframes_495kbps_30fps_editlist.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testTimestampsBVideoOnlyFramesDropTwice", ".mp4").getAbsolutePath();
        try {
            HashSet<Integer> samplesDropSet = new HashSet<Integer>();
            // Drop frames with sample index 57 to 67, P frame at 57.
            IntStream.rangeClosed(57, 67).forEach(samplesDropSet::add);
            // Drop frames with sample index 173 to 200, B frame at 173.
            IntStream.rangeClosed(173, 200).forEach(samplesDropSet::add);
            // No start offsets for any track.
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, samplesDropSet, null);
            verifyTSWithSamplesDropAndStartOffset(
                    source, true /* has B frames */, outputFilePath, samplesDropSet, null);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Test: makes sure audio/video muxing with B frames using MPEG4Writer works when frame
     * drops occur once.
     */
    @Test
    public void testTimestampsAudioBVideoFramesDropOnce() throws Exception {
        final String source = "video_h264_main_b_frames.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testTimestampsAudioBVideoFramesDropOnce", ".mp4").getAbsolutePath();
        try {
            HashSet<Integer> samplesDropSet = new HashSet<Integer>();
            // Drop frames from sample index 56 to 76, I frame at 56.
            IntStream.rangeClosed(56, 76).forEach(samplesDropSet::add);
            // No start offsets for any track.
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, samplesDropSet, null);
            verifyTSWithSamplesDropAndStartOffset(
                    source, true /* has B frames */, outputFilePath, samplesDropSet, null);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Test: makes sure audio/video muxing with B frames using MPEG4Writer works when frame
     * drops occur twice.
     */
    @Test
    public void testTimestampsAudioBVideoFramesDropTwice() throws Exception {
        final String source = "video_h264_main_b_frames.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testTimestampsAudioBVideoFramesDropTwice", ".mp4").getAbsolutePath();
        try {
            HashSet<Integer> samplesDropSet = new HashSet<Integer>();
            // Drop frames with sample index 57 to 67, P frame at 57.
            IntStream.rangeClosed(57, 67).forEach(samplesDropSet::add);
            // Drop frames with sample index 173 to 200, B frame at 173.
            IntStream.rangeClosed(173, 200).forEach(samplesDropSet::add);
            // No start offsets for any track.
349 cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath, 350 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, samplesDropSet, null); 351 verifyTSWithSamplesDropAndStartOffset( 352 source, true /* has B frames */, outputFilePath, samplesDropSet, null); 353 } finally { 354 new File(outputFilePath).delete(); 355 } 356 } 357 358 /** 359 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames 360 * when video frames start later than audio. 361 */ 362 @Test testTimestampsAudioBVideoStartOffsetVideo()363 public void testTimestampsAudioBVideoStartOffsetVideo() throws Exception { 364 Vector<Integer> startOffsetUsVect = new Vector<Integer>(); 365 // Video starts at 400000us. 366 startOffsetUsVect.add(400000); 367 // Audio starts at 0us. 368 startOffsetUsVect.add(0); 369 checkTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect); 370 } 371 372 /** 373 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames 374 * when video and audio samples start after zero, video later than audio. 375 */ 376 @Test testTimestampsAudioBVideoStartOffsetVideoAudio()377 public void testTimestampsAudioBVideoStartOffsetVideoAudio() throws Exception { 378 Vector<Integer> startOffsetUsVect = new Vector<Integer>(); 379 // Video starts at 400000us. 380 startOffsetUsVect.add(400000); 381 // Audio starts at 200000us. 382 startOffsetUsVect.add(200000); 383 checkTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect); 384 } 385 386 /** 387 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames 388 * when video and audio samples start after zero, audio later than video. 389 */ 390 @Test testTimestampsAudioBVideoStartOffsetAudioVideo()391 public void testTimestampsAudioBVideoStartOffsetAudioVideo() throws Exception { 392 if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return; 393 394 Vector<Integer> startOffsetUsVect = new Vector<Integer>(); 395 // Video starts at 200000us. 396 startOffsetUsVect.add(200000); 397 // Audio starts at 400000us. 398 startOffsetUsVect.add(400000); 399 checkTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect); 400 } 401 402 /** 403 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames 404 * when video starts after zero and audio starts before zero. 405 */ 406 @Test testTimestampsAudioBVideoStartOffsetNegativeAudioVideo()407 public void testTimestampsAudioBVideoStartOffsetNegativeAudioVideo() throws Exception { 408 if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return; 409 410 Vector<Integer> startOffsetUsVect = new Vector<Integer>(); 411 // Video starts at 200000us. 412 startOffsetUsVect.add(200000); 413 // Audio starts at -23220us, multiple of duration of one frame (1024/44100hz) 414 startOffsetUsVect.add(-23220); 415 checkTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect); 416 } 417 418 /** 419 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames when audio 420 * samples start later than video. 421 */ 422 @Test testTimestampsAudioBVideoStartOffsetAudio()423 public void testTimestampsAudioBVideoStartOffsetAudio() throws Exception { 424 if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return; 425 426 Vector<Integer> startOffsetUsVect = new Vector<Integer>(); 427 // Video starts at 0us. 428 startOffsetUsVect.add(0); 429 // Audio starts at 400000us. 
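        // (Offsets are supplied in the source file's track order -- video first, then audio --
        // which is the order cloneMediaWithSamplesDropAndStartOffsets expects.)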
        startOffsetUsVect.add(400000);
        checkTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect);
    }

    /**
     * Test: make sure audio/video muxing works well with different start offsets for
     * audio and video; audio later than video, video at 0us.
     */
    @Test
    public void testTimestampsStartOffsetAudio() throws Exception {
        if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return;

        Vector<Integer> startOffsetUsVect = new Vector<Integer>();
        // Video starts at 0us.
        startOffsetUsVect.add(0);
        // Audio starts at 500000us.
        startOffsetUsVect.add(500000);
        checkTimestampsWithStartOffsets(startOffsetUsVect);
    }

    /**
     * Test: make sure audio/video muxing works well with different start offsets for
     * audio and video; video later than audio, audio at 0us.
     */
    @Test
    public void testTimestampsStartOffsetVideo() throws Exception {
        if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return;

        Vector<Integer> startOffsetUsVect = new Vector<Integer>();
        // Video starts at 500000us.
        startOffsetUsVect.add(500000);
        // Audio starts at 0us.
        startOffsetUsVect.add(0);
        checkTimestampsWithStartOffsets(startOffsetUsVect);
    }

    /**
     * Test: make sure audio/video muxing works well with different start offsets for
     * audio and video; audio later than video, positive offsets for both.
     */
    @Test
    public void testTimestampsStartOffsetVideoAudio() throws Exception {
        if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return;

        Vector<Integer> startOffsetUsVect = new Vector<Integer>();
        // Video starts at 250000us.
        startOffsetUsVect.add(250000);
        // Audio starts at 500000us.
        startOffsetUsVect.add(500000);
        checkTimestampsWithStartOffsets(startOffsetUsVect);
    }

    /**
     * Test: make sure audio/video muxing works well with different start offsets for
     * audio and video; video later than audio, positive offsets for both.
     */
    @Test
    public void testTimestampsStartOffsetAudioVideo() throws Exception {
        if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return;

        Vector<Integer> startOffsetUsVect = new Vector<Integer>();
        // Video starts at 500000us.
        startOffsetUsVect.add(500000);
        // Audio starts at 250000us.
        startOffsetUsVect.add(250000);
        checkTimestampsWithStartOffsets(startOffsetUsVect);
    }

    /**
     * Test: make sure audio/video muxing works well with different start offsets for
     * audio and video; video later than audio, audio before zero.
     */
    @Test
    public void testTimestampsStartOffsetNegativeAudioVideo() throws Exception {
        if (!MediaUtils.check(mAndroid11, "test needs Android 11")) return;

        Vector<Integer> startOffsetUsVect = new Vector<Integer>();
        // Video starts at 50000us.
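        // (For reference, the -23220us audio offset used below is one AAC frame's worth of
        // audio: 1024 samples / 44100 Hz ~= 23220us.)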
508 startOffsetUsVect.add(50000); 509 // Audio starts at -23220us, multiple of duration of one frame (1024/44100hz) 510 startOffsetUsVect.add(-23220); 511 checkTimestampsWithStartOffsets(startOffsetUsVect); 512 } 513 514 @Test testAdditionOfHdrStaticMetadata()515 public void testAdditionOfHdrStaticMetadata() throws Exception { 516 String outputFilePath = 517 File.createTempFile("MediaMuxerTest_testAdditionOfHdrStaticMetadata", ".mp4") 518 .getAbsolutePath(); 519 // HDR static metadata encoding the following information (format defined in CTA-861.3 - 520 // Static Metadata Descriptor, includes descriptor ID): 521 // Mastering display color primaries: 522 // R: x=0.677980 y=0.321980, G: x=0.245000 y=0.703000, B: x=0.137980 y=0.052000, 523 // White point: x=0.312680 y=0.328980 524 // Mastering display luminance min: 0.0000 cd/m2, max: 1000 cd/m2 525 // Maximum Content Light Level: 1100 cd/m2 526 // Maximum Frame-Average Light Level: 180 cd/m2 527 byte[] inputHdrStaticMetadata = 528 Util.getBytesFromHexString("006b84e33eda2f4e89f31a280a123d4140e80300004c04b400"); 529 Function<MediaFormat, MediaFormat> staticMetadataAdditionFunction = 530 (mediaFormat) -> { 531 if (!mediaFormat.getString(MediaFormat.KEY_MIME).startsWith("video/")) { 532 return mediaFormat; 533 } 534 MediaFormat result = new MediaFormat(mediaFormat); 535 result.setByteBuffer( 536 MediaFormat.KEY_HDR_STATIC_INFO, 537 ByteBuffer.wrap(inputHdrStaticMetadata)); 538 return result; 539 }; 540 try { 541 cloneMediaUsingMuxer( 542 /* srcMedia= */ "video_h264_main_b_frames.mp4", 543 outputFilePath, 544 /* expectedTrackCount= */ 2, 545 /* degrees= */ 0, 546 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, 547 /* signalEOS= */ false, 548 staticMetadataAdditionFunction); 549 assertArrayEquals( 550 inputHdrStaticMetadata, getVideoColorInfo(outputFilePath).hdrStaticInfo); 551 } finally { 552 new File(outputFilePath).delete(); 553 } 554 } 555 556 @Test testAdditionOfInvalidHdrStaticMetadataIsIgnored()557 public void testAdditionOfInvalidHdrStaticMetadataIsIgnored() throws Exception { 558 String outputFilePath = 559 File.createTempFile( 560 "MediaMuxerTest_testAdditionOfInvalidHdrStaticMetadataIsIgnored", 561 ".mp4") 562 .getAbsolutePath(); 563 Function<MediaFormat, MediaFormat> staticMetadataAdditionFunction = 564 (mediaFormat) -> { 565 MediaFormat result = new MediaFormat(mediaFormat); 566 // The input static info should be ignored, because its size is invalid (26 vs 567 // expected 25). 568 result.setByteBuffer( 569 MediaFormat.KEY_HDR_STATIC_INFO, ByteBuffer.allocateDirect(26)); 570 return result; 571 }; 572 try { 573 cloneMediaUsingMuxer( 574 /* srcMedia= */ "video_h264_main_b_frames.mp4", 575 outputFilePath, 576 /* expectedTrackCount= */ 2, 577 /* degrees= */ 0, 578 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, 579 /* signalEOS= */ false, 580 staticMetadataAdditionFunction); 581 assertNull(getVideoColorInfo(outputFilePath)); 582 } finally { 583 new File(outputFilePath).delete(); 584 } 585 } 586 587 /** 588 * Test: makes sure if audio/video muxing using MPEG4Writer works with B Frames 589 * when video and audio samples start after different times. 590 */ checkTimestampsAudioBVideoDiffStartOffsets(Vector<Integer> startOffsetUs)591 private void checkTimestampsAudioBVideoDiffStartOffsets(Vector<Integer> startOffsetUs) 592 throws Exception { 593 MPEG4CheckTimestampsAudioBVideoDiffStartOffsets(startOffsetUs); 594 // TODO: uncomment webm testing once bugs related to timestamps in webmwriter are fixed. 
        // WebMCheckTimestampsAudioBVideoDiffStartOffsets(startOffsetUsVect);
    }

    private void MPEG4CheckTimestampsAudioBVideoDiffStartOffsets(Vector<Integer> startOffsetUs)
            throws Exception {
        if (VERBOSE) {
            Log.v(TAG, "MPEG4CheckTimestampsAudioBVideoDiffStartOffsets");
        }
        final String source = "video_h264_main_b_frames.mp4";
        String outputFilePath = File.createTempFile(
                "MediaMuxerTest_testTimestampsAudioBVideoDiffStartOffsets", ".mp4").getAbsolutePath();
        try {
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, null, startOffsetUs);
            verifyTSWithSamplesDropAndStartOffset(
                    source, true /* has B frames */, outputFilePath, null, startOffsetUs);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /*
     * Check if timestamps are written consistently across all formats supported by MediaMuxer.
     */
    private void checkTimestampsWithStartOffsets(Vector<Integer> startOffsetUsVect)
            throws Exception {
        MPEG4CheckTimestampsWithStartOffsets(startOffsetUsVect);
        // TODO: uncomment webm testing once bugs related to timestamps in webmwriter are fixed.
        // WebMCheckTimestampsWithStartOffsets(startOffsetUsVect);
        // TODO: need to add other formats, OGG, AAC, AMR
    }

    /**
     * Make sure audio/video muxing using MPEG4Writer works well with different start
     * offsets for audio and video.
     */
    private void MPEG4CheckTimestampsWithStartOffsets(Vector<Integer> startOffsetUsVect)
            throws Exception {
        if (VERBOSE) {
            Log.v(TAG, "MPEG4CheckTimestampsWithStartOffsets");
        }
        final String source = "video_480x360_mp4_h264_500kbps_30fps_aac_stereo_128kbps_44100hz.mp4";
        String outputFilePath =
                File.createTempFile("MediaMuxerTest_MPEG4CheckTimestampsWithStartOffsets", ".mp4")
                        .getAbsolutePath();
        try {
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4, null, startOffsetUsVect);
            verifyTSWithSamplesDropAndStartOffset(
                    source, false /* no B frames */, outputFilePath, null, startOffsetUsVect);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /**
     * Make sure audio/video muxing using WebMWriter works well with different start
     * offsets for audio and video.
     */
    private void WebMCheckTimestampsWithStartOffsets(Vector<Integer> startOffsetUsVect)
            throws Exception {
        if (VERBOSE) {
            Log.v(TAG, "WebMCheckTimestampsWithStartOffsets");
        }
        final String source =
                "video_480x360_webm_vp9_333kbps_25fps_vorbis_stereo_128kbps_48000hz.webm";
        String outputFilePath =
                File.createTempFile("MediaMuxerTest_WebMCheckTimestampsWithStartOffsets", ".webm")
                        .getAbsolutePath();
        try {
            cloneMediaWithSamplesDropAndStartOffsets(source, outputFilePath,
                    MediaMuxer.OutputFormat.MUXER_OUTPUT_WEBM, null, startOffsetUsVect);
            verifyTSWithSamplesDropAndStartOffset(
                    source, false /* no B frames */, outputFilePath, null, startOffsetUsVect);
        } finally {
            new File(outputFilePath).delete();
        }
    }

    /** Clones a media file and then compares against the source file to make sure they match.
*/ cloneAndVerify( final String srcMedia, String outputMediaFile, int expectedTrackCount, int degrees, int fmt, boolean signalEos)675 private void cloneAndVerify( 676 final String srcMedia, 677 String outputMediaFile, 678 int expectedTrackCount, 679 int degrees, 680 int fmt, 681 boolean signalEos) 682 throws IOException { 683 cloneAndVerify( 684 srcMedia, 685 outputMediaFile, 686 expectedTrackCount, 687 degrees, 688 fmt, 689 signalEos, 690 Function.identity()); 691 } 692 693 /** 694 * Clones a given file using MediaMuxer and verifies the output matches the input. 695 * 696 * <p>See {@link #cloneMediaUsingMuxer} for information about the parameters. 697 */ cloneAndVerify( final String srcMedia, String outputMediaFile, int expectedTrackCount, int degrees, int fmt, boolean signalEos, Function<MediaFormat, MediaFormat> muxerInputTrackFormatTransformer)698 private void cloneAndVerify( 699 final String srcMedia, 700 String outputMediaFile, 701 int expectedTrackCount, 702 int degrees, 703 int fmt, 704 boolean signalEos, 705 Function<MediaFormat, MediaFormat> muxerInputTrackFormatTransformer) 706 throws IOException { 707 try { 708 cloneMediaUsingMuxer( 709 srcMedia, 710 outputMediaFile, 711 expectedTrackCount, 712 degrees, 713 fmt, 714 signalEos, 715 muxerInputTrackFormatTransformer); 716 if (fmt == MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4 || 717 fmt == MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP) { 718 verifyAttributesMatch(srcMedia, outputMediaFile, degrees); 719 verifyLocationInFile(outputMediaFile); 720 } 721 // Verify timestamp of all samples. 722 verifyTSWithSamplesDropAndStartOffset( 723 srcMedia, false /* no B frames */,outputMediaFile, null, null); 724 } finally { 725 new File(outputMediaFile).delete(); 726 } 727 } 728 729 /** 730 * Clones a given file using MediaMuxer. 731 * 732 * @param srcMedia Input file path passed to extractor 733 * @param dstMediaPath Output file path passed to muxer 734 * @param expectedTrackCount Expected number of tracks in the input file 735 * @param degrees orientation hint in degrees 736 * @param fmt one of the values defined in {@link MediaMuxer.OutputFormat}. 737 * @param signalEos explicitly signal EOS by sending an empty packet after muxing. Only works 738 * fmt is MUXER_OUTPUT_MPEG_4. 739 * @param muxerInputTrackFormatTransformer Function applied on the MediaMuxer input formats. If 740 * the function returns null for a given MediaFormat, the corresponding track is discarded 741 * and not passed to MediaMuxer. 742 * @throws IOException if muxer failed to open output file for write. 743 */ cloneMediaUsingMuxer( final String srcMedia, String dstMediaPath, int expectedTrackCount, int degrees, int fmt, boolean signalEos, Function<MediaFormat, MediaFormat> muxerInputTrackFormatTransformer)744 private void cloneMediaUsingMuxer( 745 final String srcMedia, 746 String dstMediaPath, 747 int expectedTrackCount, 748 int degrees, 749 int fmt, 750 boolean signalEos, 751 Function<MediaFormat, MediaFormat> muxerInputTrackFormatTransformer) 752 throws IOException { 753 // Set up MediaExtractor to read from the source. 754 AssetFileDescriptor srcFd = getAssetFileDescriptorFor(srcMedia); 755 MediaExtractor extractor = new MediaExtractor(); 756 extractor.setDataSource(srcFd.getFileDescriptor(), srcFd.getStartOffset(), 757 srcFd.getLength()); 758 759 int trackCount = extractor.getTrackCount(); 760 assertEquals("wrong number of tracks", expectedTrackCount, trackCount); 761 762 // Set up MediaMuxer for the destination. 
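        // MediaMuxer lifecycle used below: construct the muxer, addTrack() for every track,
        // optionally setOrientationHint()/setLocation() before start(), then start(),
        // writeSampleData() per sample, and finally stop()/release().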
763 MediaMuxer muxer; 764 muxer = new MediaMuxer(dstMediaPath, fmt); 765 766 // Set up the tracks. 767 HashMap<Integer, Integer> indexMap = new HashMap<Integer, Integer>(trackCount); 768 for (int i = 0; i < trackCount; i++) { 769 MediaFormat format = extractor.getTrackFormat(i); 770 MediaFormat muxedFormat = muxerInputTrackFormatTransformer.apply(format); 771 if (muxedFormat != null) { 772 extractor.selectTrack(i); 773 int dstIndex = muxer.addTrack(muxedFormat); 774 indexMap.put(i, dstIndex); 775 } 776 } 777 778 // Copy the samples from MediaExtractor to MediaMuxer. 779 boolean sawEOS = false; 780 int bufferSize = MAX_SAMPLE_SIZE; 781 int frameCount = 0; 782 int offset = 100; 783 784 ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize); 785 BufferInfo bufferInfo = new BufferInfo(); 786 787 if (degrees >= 0) { 788 muxer.setOrientationHint(degrees); 789 } 790 791 if (fmt == MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4 || 792 fmt == MediaMuxer.OutputFormat.MUXER_OUTPUT_3GPP) { 793 // Test setLocation out of bound cases 794 try { 795 muxer.setLocation(BAD_LATITUDE, LONGITUDE); 796 fail("setLocation succeeded with bad argument: [" + BAD_LATITUDE + "," + LONGITUDE 797 + "]"); 798 } catch (IllegalArgumentException e) { 799 // Expected 800 } 801 try { 802 muxer.setLocation(LATITUDE, BAD_LONGITUDE); 803 fail("setLocation succeeded with bad argument: [" + LATITUDE + "," + BAD_LONGITUDE 804 + "]"); 805 } catch (IllegalArgumentException e) { 806 // Expected 807 } 808 809 muxer.setLocation(LATITUDE, LONGITUDE); 810 } 811 812 muxer.start(); 813 while (!sawEOS) { 814 bufferInfo.offset = offset; 815 bufferInfo.size = extractor.readSampleData(dstBuf, offset); 816 817 if (bufferInfo.size < 0) { 818 if (VERBOSE) { 819 Log.d(TAG, "saw input EOS."); 820 } 821 sawEOS = true; 822 bufferInfo.size = 0; 823 } else { 824 bufferInfo.presentationTimeUs = extractor.getSampleTime(); 825 bufferInfo.flags = extractor.getSampleFlags(); 826 int trackIndex = extractor.getSampleTrackIndex(); 827 828 muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, 829 bufferInfo); 830 extractor.advance(); 831 832 frameCount++; 833 if (VERBOSE) { 834 Log.d(TAG, "Frame (" + frameCount + ") " + 835 "PresentationTimeUs:" + bufferInfo.presentationTimeUs + 836 " Flags:" + bufferInfo.flags + 837 " TrackIndex:" + trackIndex + 838 " Size(KB) " + bufferInfo.size / 1024); 839 } 840 } 841 } 842 843 // By explicitly passing in an EOS buffer with the correct timestamp, we can control the 844 // duration of the last sample so that the duration of the produced file matches the input 845 // file. This is needed for input files whose last sample's duration doesn't match the 846 // preceding one, because the MP4 MediaMuxer uses the duration of the sample second to last 847 // for the last sample by default. 848 if (signalEos) { 849 assertEquals( 850 "signalEos is only supported for MP4", 851 fmt, 852 MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4); 853 // Signal EOS with the correct duration for the last frame. 
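            // Illustrative sketch of the same idea (the names here are placeholders, not part of
            // this test): write a zero-sized buffer flagged BUFFER_FLAG_END_OF_STREAM at the
            // track's duration so MPEG4Writer can derive the last sample's real duration:
            //   BufferInfo eos = new BufferInfo();
            //   eos.set(0 /* offset */, 0 /* size */, trackDurationUs,
            //           MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            //   muxer.writeSampleData(trackIndex, emptyBuffer, eos);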
854 bufferInfo.size = 0; 855 bufferInfo.flags = MediaCodec.BUFFER_FLAG_END_OF_STREAM; 856 for (int i = 0; i < trackCount; i++) { 857 MediaFormat format = extractor.getTrackFormat(i); 858 bufferInfo.presentationTimeUs = format.getLong(MediaFormat.KEY_DURATION, 0); 859 if (bufferInfo.presentationTimeUs != 0) { 860 muxer.writeSampleData(indexMap.get(i), dstBuf, bufferInfo); 861 } 862 } 863 } 864 865 muxer.stop(); 866 muxer.release(); 867 extractor.release(); 868 srcFd.close(); 869 return; 870 } 871 872 /** 873 * Compares some attributes using MediaMetadataRetriever to make sure the 874 * cloned media file matches the source file. 875 */ verifyAttributesMatch(final String srcMedia, String testMediaPath, int degrees)876 private void verifyAttributesMatch(final String srcMedia, String testMediaPath, 877 int degrees) throws IOException { 878 AssetFileDescriptor testFd = getAssetFileDescriptorFor(srcMedia); 879 880 MediaMetadataRetriever retrieverSrc = new MediaMetadataRetriever(); 881 retrieverSrc.setDataSource(testFd.getFileDescriptor(), 882 testFd.getStartOffset(), testFd.getLength()); 883 884 MediaMetadataRetriever retrieverTest = new MediaMetadataRetriever(); 885 retrieverTest.setDataSource(testMediaPath); 886 887 String testDegrees = retrieverTest.extractMetadata( 888 MediaMetadataRetriever.METADATA_KEY_VIDEO_ROTATION); 889 if (testDegrees != null) { 890 assertEquals("Different degrees", degrees, 891 Integer.parseInt(testDegrees)); 892 } 893 894 String heightSrc = retrieverSrc.extractMetadata( 895 MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT); 896 String heightTest = retrieverTest.extractMetadata( 897 MediaMetadataRetriever.METADATA_KEY_VIDEO_HEIGHT); 898 assertEquals("Different height", heightSrc, 899 heightTest); 900 901 String widthSrc = retrieverSrc.extractMetadata( 902 MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH); 903 String widthTest = retrieverTest.extractMetadata( 904 MediaMetadataRetriever.METADATA_KEY_VIDEO_WIDTH); 905 assertEquals("Different width", widthSrc, 906 widthTest); 907 908 //TODO: need to check each individual track's duration also. 909 String durationSrc = retrieverSrc.extractMetadata( 910 MediaMetadataRetriever.METADATA_KEY_DURATION); 911 String durationTest = retrieverTest.extractMetadata( 912 MediaMetadataRetriever.METADATA_KEY_DURATION); 913 assertEquals("Different duration", durationSrc, 914 durationTest); 915 916 retrieverSrc.release(); 917 retrieverTest.release(); 918 testFd.close(); 919 } 920 verifyLocationInFile(String fileName)921 private void verifyLocationInFile(String fileName) throws IOException { 922 MediaMetadataRetriever retriever = new MediaMetadataRetriever(); 923 retriever.setDataSource(fileName); 924 String location = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_LOCATION); 925 assertNotNull("No location information found in file " + fileName, location); 926 927 928 // parsing String location and recover the location information in floats 929 // Make sure the tolerance is very small - due to rounding errors. 930 931 // Trim the trailing slash, if any. 932 int lastIndex = location.lastIndexOf('/'); 933 if (lastIndex != -1) { 934 location = location.substring(0, lastIndex); 935 } 936 937 // Get the position of the -/+ sign in location String, which indicates 938 // the beginning of the longitude. 
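        // (With the LATITUDE/LONGITUDE set above, the retriever typically returns an ISO
        // 6709-style string along the lines of "+0.0000-180.0000/"; the trailing '/' has
        // already been trimmed at this point, and the exact digit count may vary.)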
939 int minusIndex = location.lastIndexOf('-'); 940 int plusIndex = location.lastIndexOf('+'); 941 942 assertTrue("+ or - is not found or found only at the beginning [" + location + "]", 943 (minusIndex > 0 || plusIndex > 0)); 944 int index = Math.max(minusIndex, plusIndex); 945 946 float latitude = Float.parseFloat(location.substring(0, index)); 947 float longitude = Float.parseFloat(location.substring(index)); 948 assertTrue("Incorrect latitude: " + latitude + " [" + location + "]", 949 Math.abs(latitude - LATITUDE) <= TOLERANCE); 950 assertTrue("Incorrect longitude: " + longitude + " [" + location + "]", 951 Math.abs(longitude - LONGITUDE) <= TOLERANCE); 952 retriever.release(); 953 } 954 955 /** 956 * Uses 2 MediaExtractor, seeking to the same position, reads the sample and 957 * makes sure the samples match. 958 */ verifySamplesMatch(final String srcMedia, String testMediaPath, int seekToUs, long offsetTimeUs)959 private void verifySamplesMatch(final String srcMedia, String testMediaPath, int seekToUs, 960 long offsetTimeUs) throws IOException { 961 AssetFileDescriptor testFd = getAssetFileDescriptorFor(srcMedia); 962 MediaExtractor extractorSrc = new MediaExtractor(); 963 extractorSrc.setDataSource(testFd.getFileDescriptor(), 964 testFd.getStartOffset(), testFd.getLength()); 965 int trackCount = extractorSrc.getTrackCount(); 966 final int videoTrackIndex = 0; 967 968 MediaExtractor extractorTest = new MediaExtractor(); 969 extractorTest.setDataSource(testMediaPath); 970 971 assertEquals("wrong number of tracks", trackCount, 972 extractorTest.getTrackCount()); 973 974 // Make sure the format is the same and select them 975 for (int i = 0; i < trackCount; i++) { 976 MediaFormat formatSrc = extractorSrc.getTrackFormat(i); 977 MediaFormat formatTest = extractorTest.getTrackFormat(i); 978 979 String mimeIn = formatSrc.getString(MediaFormat.KEY_MIME); 980 String mimeOut = formatTest.getString(MediaFormat.KEY_MIME); 981 if (!(mimeIn.equals(mimeOut))) { 982 fail("format didn't match on track No." + i + 983 formatSrc.toString() + "\n" + formatTest.toString()); 984 } 985 extractorSrc.selectTrack(videoTrackIndex); 986 extractorTest.selectTrack(videoTrackIndex); 987 988 // Pick a time and try to compare the frame. 989 extractorSrc.seekTo(seekToUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 990 extractorTest.seekTo(seekToUs + offsetTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 991 992 int bufferSize = MAX_SAMPLE_SIZE; 993 ByteBuffer byteBufSrc = ByteBuffer.allocate(bufferSize); 994 ByteBuffer byteBufTest = ByteBuffer.allocate(bufferSize); 995 996 int srcBufSize = extractorSrc.readSampleData(byteBufSrc, 0); 997 int testBufSize = extractorTest.readSampleData(byteBufTest, 0); 998 999 if (!(byteBufSrc.equals(byteBufTest))) { 1000 if (VERBOSE) { 1001 Log.d(TAG, 1002 "srcTrackIndex:" + extractorSrc.getSampleTrackIndex() 1003 + " testTrackIndex:" + extractorTest.getSampleTrackIndex()); 1004 Log.d(TAG, 1005 "srcTSus:" + extractorSrc.getSampleTime() 1006 + " testTSus:" + extractorTest.getSampleTime()); 1007 Log.d(TAG, "srcBufSize:" + srcBufSize + "testBufSize:" + testBufSize); 1008 } 1009 fail("byteBuffer didn't match"); 1010 } 1011 extractorSrc.unselectTrack(i); 1012 extractorTest.unselectTrack(i); 1013 } 1014 extractorSrc.release(); 1015 extractorTest.release(); 1016 testFd.close(); 1017 } 1018 1019 /** 1020 * Using MediaMuxer and MediaExtractor to mux a media file from another file while skipping 1021 * some video frames as in the issues b/63590381 and b/64949961. 
1022 */ simulateVideoFramesDropIssuesAndMux(final String srcMedia, String dstMediaPath, int expectedTrackCount, int fmt)1023 private void simulateVideoFramesDropIssuesAndMux(final String srcMedia, String dstMediaPath, 1024 int expectedTrackCount, int fmt) throws IOException { 1025 // Set up MediaExtractor to read from the source. 1026 AssetFileDescriptor srcFd = getAssetFileDescriptorFor(srcMedia); 1027 MediaExtractor extractor = new MediaExtractor(); 1028 extractor.setDataSource(srcFd.getFileDescriptor(), srcFd.getStartOffset(), 1029 srcFd.getLength()); 1030 1031 int trackCount = extractor.getTrackCount(); 1032 assertEquals("wrong number of tracks", expectedTrackCount, trackCount); 1033 1034 // Set up MediaMuxer for the destination. 1035 MediaMuxer muxer; 1036 muxer = new MediaMuxer(dstMediaPath, fmt); 1037 1038 // Set up the tracks. 1039 HashMap<Integer, Integer> indexMap = new HashMap<Integer, Integer>(trackCount); 1040 1041 for (int i = 0; i < trackCount; i++) { 1042 extractor.selectTrack(i); 1043 MediaFormat format = extractor.getTrackFormat(i); 1044 int dstIndex = muxer.addTrack(format); 1045 indexMap.put(i, dstIndex); 1046 } 1047 1048 // Copy the samples from MediaExtractor to MediaMuxer. 1049 boolean sawEOS = false; 1050 int bufferSize = MAX_SAMPLE_SIZE; 1051 int sampleCount = 0; 1052 int offset = 0; 1053 int videoSampleCount = 0; 1054 // Counting frame index values starting from 1 1055 final int muxAllTypeVideoFramesUntilIndex = 136; // I/P/B frames passed as it is until this 1056 final int muxAllTypeVideoFramesFromIndex = 171; // I/P/B frames passed as it is from this 1057 final int pFrameBeforeARandomBframeIndex = 137; 1058 final int bFrameAfterPFrameIndex = pFrameBeforeARandomBframeIndex+1; 1059 1060 ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize); 1061 BufferInfo bufferInfo = new BufferInfo(); 1062 1063 muxer.start(); 1064 while (!sawEOS) { 1065 bufferInfo.offset = 0; 1066 bufferInfo.size = extractor.readSampleData(dstBuf, offset); 1067 if (bufferInfo.size < 0) { 1068 if (VERBOSE) { 1069 Log.d(TAG, "saw input EOS."); 1070 } 1071 sawEOS = true; 1072 bufferInfo.size = 0; 1073 } else { 1074 bufferInfo.presentationTimeUs = extractor.getSampleTime(); 1075 bufferInfo.flags = extractor.getSampleFlags(); 1076 int trackIndex = extractor.getSampleTrackIndex(); 1077 // Video track at index 0, skip some video frames while muxing. 1078 if (trackIndex == 0) { 1079 ++videoSampleCount; 1080 if (VERBOSE) { 1081 Log.v(TAG, "videoSampleCount : " + videoSampleCount); 1082 } 1083 if (videoSampleCount <= muxAllTypeVideoFramesUntilIndex 1084 || videoSampleCount == bFrameAfterPFrameIndex) { 1085 // Write frame as it is. 1086 muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo); 1087 } else if (videoSampleCount == pFrameBeforeARandomBframeIndex 1088 || videoSampleCount >= muxAllTypeVideoFramesFromIndex) { 1089 // Adjust time stamp for this P frame to a few frames later, say ~5seconds 1090 bufferInfo.presentationTimeUs += OFFSET_TIME_US; 1091 muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo); 1092 } else { 1093 // Skip frames after bFrameAfterPFrameIndex 1094 // and before muxAllTypeVideoFramesFromIndex. 
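                        // (Net effect with the constants above: samples 1-136 and sample 138 are
                        // written unchanged, sample 137 and samples 171 onward are written with
                        // their timestamps shifted by OFFSET_TIME_US, and samples 139-170 are
                        // dropped.)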
                        if (VERBOSE) {
                            Log.i(TAG, "skipped this frame");
                        }
                    }
                } else {
                    // Write audio data as is, continuously.
                    muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo);
                }
                extractor.advance();
                sampleCount++;
                if (VERBOSE) {
                    Log.d(TAG, "Frame (" + sampleCount + ") " +
                            "PresentationTimeUs:" + bufferInfo.presentationTimeUs +
                            " Flags:" + bufferInfo.flags +
                            " TrackIndex:" + trackIndex +
                            " Size(bytes) " + bufferInfo.size);
                }
            }
        }

        muxer.stop();
        muxer.release();
        extractor.release();
        srcFd.close();

        return;
    }

    /**
     * Uses two MediaExtractors and checks that the timestamps of the first few samples, and of a
     * few samples after the last sync frame, match between the source and the muxed file.
     */
    private void verifyAFewSamplesTimestamp(final String srcMedia, String testMediaPath)
            throws IOException {
        final int numFramesTSCheck = 10; // Number of frames whose timestamps are checked.

        AssetFileDescriptor srcFd = getAssetFileDescriptorFor(srcMedia);
        MediaExtractor extractorSrc = new MediaExtractor();
        extractorSrc.setDataSource(srcFd.getFileDescriptor(),
                srcFd.getStartOffset(), srcFd.getLength());
        MediaExtractor extractorTest = new MediaExtractor();
        extractorTest.setDataSource(testMediaPath);

        int trackCount = extractorSrc.getTrackCount();
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractorSrc.getTrackFormat(i);
            extractorSrc.selectTrack(i);
            extractorTest.selectTrack(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
                // Check time stamps for numFramesTSCheck frames from 33333us.
                checkNumFramesTimestamp(33333, 0, numFramesTSCheck, extractorSrc, extractorTest);
                // Check time stamps for numFramesTSCheck frames from 9333333us -
                // the sync frame after the frame drops at index 172 of the video track.
                checkNumFramesTimestamp(
                        9333333, OFFSET_TIME_US, numFramesTSCheck, extractorSrc, extractorTest);
            } else if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
                // Check timestamps for all audio frames. Test file has 427 audio frames.
1152 checkNumFramesTimestamp(0, 0, 427, extractorSrc, extractorTest); 1153 } 1154 extractorSrc.unselectTrack(i); 1155 extractorTest.unselectTrack(i); 1156 } 1157 1158 extractorSrc.release(); 1159 extractorTest.release(); 1160 srcFd.close(); 1161 } 1162 checkNumFramesTimestamp(long seekTimeUs, long offsetTimeUs, int numFrames, MediaExtractor extractorSrc, MediaExtractor extractorTest)1163 private void checkNumFramesTimestamp(long seekTimeUs, long offsetTimeUs, int numFrames, 1164 MediaExtractor extractorSrc, MediaExtractor extractorTest) { 1165 long srcSampleTimeUs = -1; 1166 long testSampleTimeUs = -1; 1167 extractorSrc.seekTo(seekTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1168 extractorTest.seekTo(seekTimeUs + offsetTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1169 while (numFrames-- > 0 ) { 1170 srcSampleTimeUs = extractorSrc.getSampleTime(); 1171 testSampleTimeUs = extractorTest.getSampleTime(); 1172 if (srcSampleTimeUs == -1 || testSampleTimeUs == -1) { 1173 fail("either of tracks reached end of stream"); 1174 } 1175 if ((srcSampleTimeUs + offsetTimeUs) != testSampleTimeUs) { 1176 if (VERBOSE) { 1177 Log.d(TAG, "srcTrackIndex:" + extractorSrc.getSampleTrackIndex() + 1178 " testTrackIndex:" + extractorTest.getSampleTrackIndex()); 1179 Log.d(TAG, "srcTSus:" + srcSampleTimeUs + " testTSus:" + testSampleTimeUs); 1180 } 1181 fail("timestamps didn't match"); 1182 } 1183 extractorSrc.advance(); 1184 extractorTest.advance(); 1185 } 1186 } 1187 1188 /** 1189 * Using MediaMuxer and MediaExtractor to mux a media file from another file while skipping 1190 * 0 or more video frames and desired start offsets for each track. 1191 * startOffsetUsVect : order of tracks is the same as in the input file 1192 */ cloneMediaWithSamplesDropAndStartOffsets(final String srcMedia, String dstMediaPath, int fmt, HashSet<Integer> samplesDropSet, Vector<Integer> startOffsetUsVect)1193 private void cloneMediaWithSamplesDropAndStartOffsets(final String srcMedia, String dstMediaPath, 1194 int fmt, HashSet<Integer> samplesDropSet, Vector<Integer> startOffsetUsVect) 1195 throws IOException { 1196 // Set up MediaExtractor to read from the source. 1197 AssetFileDescriptor srcFd = getAssetFileDescriptorFor(srcMedia); 1198 MediaExtractor extractor = new MediaExtractor(); 1199 extractor.setDataSource(srcFd.getFileDescriptor(), srcFd.getStartOffset(), 1200 srcFd.getLength()); 1201 1202 int trackCount = extractor.getTrackCount(); 1203 1204 // Set up MediaMuxer for the destination. 1205 MediaMuxer muxer; 1206 muxer = new MediaMuxer(dstMediaPath, fmt); 1207 1208 // Set up the tracks. 1209 HashMap<Integer, Integer> indexMap = new HashMap<Integer, Integer>(trackCount); 1210 1211 int videoTrackIndex = 100; 1212 int videoStartOffsetUs = 0; 1213 int audioTrackIndex = 100; 1214 int audioStartOffsetUs = 0; 1215 for (int i = 0; i < trackCount; i++) { 1216 extractor.selectTrack(i); 1217 MediaFormat format = extractor.getTrackFormat(i); 1218 int dstIndex = muxer.addTrack(format); 1219 indexMap.put(i, dstIndex); 1220 if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) { 1221 videoTrackIndex = i; 1222 // Make sure there's an entry for video track. 1223 if (startOffsetUsVect != null && (videoTrackIndex < startOffsetUsVect.size())) { 1224 videoStartOffsetUs = startOffsetUsVect.get(videoTrackIndex); 1225 } 1226 } 1227 if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) { 1228 audioTrackIndex = i; 1229 // Make sure there's an entry for audio track. 
1230 if (startOffsetUsVect != null && (audioTrackIndex < startOffsetUsVect.size())) { 1231 audioStartOffsetUs = startOffsetUsVect.get(audioTrackIndex); 1232 } 1233 } 1234 } 1235 1236 // Copy the samples from MediaExtractor to MediaMuxer. 1237 boolean sawEOS = false; 1238 int bufferSize = MAX_SAMPLE_SIZE; 1239 int sampleCount = 0; 1240 int offset = 0; 1241 int videoSampleCount = 0; 1242 1243 ByteBuffer dstBuf = ByteBuffer.allocate(bufferSize); 1244 BufferInfo bufferInfo = new BufferInfo(); 1245 1246 muxer.start(); 1247 while (!sawEOS) { 1248 bufferInfo.offset = 0; 1249 bufferInfo.size = extractor.readSampleData(dstBuf, offset); 1250 if (bufferInfo.size < 0) { 1251 if (VERBOSE) { 1252 Log.d(TAG, "saw input EOS."); 1253 } 1254 sawEOS = true; 1255 bufferInfo.size = 0; 1256 } else { 1257 bufferInfo.presentationTimeUs = extractor.getSampleTime(); 1258 bufferInfo.flags = extractor.getSampleFlags(); 1259 int trackIndex = extractor.getSampleTrackIndex(); 1260 if (VERBOSE) { 1261 Log.v(TAG, "TrackIndex:" + trackIndex + " PresentationTimeUs:" + 1262 bufferInfo.presentationTimeUs + " Flags:" + bufferInfo.flags + 1263 " Size(bytes)" + bufferInfo.size); 1264 } 1265 if (trackIndex == videoTrackIndex) { 1266 ++videoSampleCount; 1267 if (VERBOSE) { 1268 Log.v(TAG, "videoSampleCount : " + videoSampleCount); 1269 } 1270 if (samplesDropSet == null || (!samplesDropSet.contains(videoSampleCount))) { 1271 // Write video frame with start offset adjustment. 1272 bufferInfo.presentationTimeUs += videoStartOffsetUs; 1273 muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo); 1274 } 1275 else { 1276 if (VERBOSE) { 1277 Log.v(TAG, "skipped this frame"); 1278 } 1279 } 1280 } else { 1281 // write audio sample with start offset adjustment. 1282 bufferInfo.presentationTimeUs += audioStartOffsetUs; 1283 muxer.writeSampleData(indexMap.get(trackIndex), dstBuf, bufferInfo); 1284 } 1285 extractor.advance(); 1286 sampleCount++; 1287 if (VERBOSE) { 1288 Log.i(TAG, "Sample (" + sampleCount + ")" + 1289 " TrackIndex:" + trackIndex + 1290 " PresentationTimeUs:" + bufferInfo.presentationTimeUs + 1291 " Flags:" + bufferInfo.flags + 1292 " Size(bytes)" + bufferInfo.size ); 1293 } 1294 } 1295 } 1296 1297 muxer.stop(); 1298 muxer.release(); 1299 extractor.release(); 1300 srcFd.close(); 1301 1302 return; 1303 } 1304 1305 /* 1306 * Uses MediaExtractors and checks whether timestamps of all samples except in samplesDropSet 1307 * and with start offsets adjustments for each track match. 
     */
    private void verifyTSWithSamplesDropAndStartOffset(final String srcMedia, boolean hasBframes,
            String testMediaPath, HashSet<Integer> samplesDropSet,
            Vector<Integer> startOffsetUsVect) throws IOException {
        AssetFileDescriptor srcFd = getAssetFileDescriptorFor(srcMedia);
        MediaExtractor extractorSrc = new MediaExtractor();
        extractorSrc.setDataSource(srcFd.getFileDescriptor(),
                srcFd.getStartOffset(), srcFd.getLength());
        MediaExtractor extractorTest = new MediaExtractor();
        extractorTest.setDataSource(testMediaPath);

        int videoTrackIndex = -1;
        int videoStartOffsetUs = 0;
        int minStartOffsetUs = Integer.MAX_VALUE;
        int trackCount = extractorSrc.getTrackCount();

        /*
         * When all tracks' start offsets are positive, MPEG4Writer makes the start timestamp of
         * the earliest track zero and adjusts all other tracks' timestamps accordingly.
         */
        // TODO: confirm whether the above logic holds for all other writers we support.
        if (startOffsetUsVect != null) {
            for (int startOffsetUs : startOffsetUsVect) {
                minStartOffsetUs = Math.min(startOffsetUs, minStartOffsetUs);
            }
        } else {
            minStartOffsetUs = 0;
        }

        if (minStartOffsetUs < 0) {
            /*
             * At least one of the start offsets is negative. We have some test cases with
             * negative offsets for audio; minStartOffsetUs has to be reset to zero, as the
             * writer won't adjust any track's timestamps in that case.
             */
            minStartOffsetUs = 0;
        }

        // Select video track.
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractorSrc.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("video/")) {
                videoTrackIndex = i;
                if (startOffsetUsVect != null && videoTrackIndex < startOffsetUsVect.size()) {
                    videoStartOffsetUs = startOffsetUsVect.get(videoTrackIndex);
                }
                extractorSrc.selectTrack(videoTrackIndex);
                extractorTest.selectTrack(videoTrackIndex);
                checkVideoSamplesTimeStamps(extractorSrc, hasBframes, extractorTest, samplesDropSet,
                        videoStartOffsetUs - minStartOffsetUs);
                extractorSrc.unselectTrack(videoTrackIndex);
                extractorTest.unselectTrack(videoTrackIndex);
            }
        }

        int audioTrackIndex = -1;
        int audioSampleCount = 0;
        int audioStartOffsetUs = 0;
        // Select audio track.
        for (int i = 0; i < trackCount; i++) {
            MediaFormat format = extractorSrc.getTrackFormat(i);
            if (format.getString(MediaFormat.KEY_MIME).startsWith("audio/")) {
                audioTrackIndex = i;
                if (startOffsetUsVect != null && audioTrackIndex < startOffsetUsVect.size()) {
                    audioStartOffsetUs = startOffsetUsVect.get(audioTrackIndex);
                }
                extractorSrc.selectTrack(audioTrackIndex);
                extractorTest.selectTrack(audioTrackIndex);
                checkAudioSamplesTimestamps(
                        extractorSrc, extractorTest, audioStartOffsetUs - minStartOffsetUs);
            }
        }

        extractorSrc.release();
        extractorTest.release();
        srcFd.close();
    }

    // Check timestamps of all video samples.
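    // Worked example (illustrative numbers): if the clone was muxed with a 500000us video start
    // offset and a 250000us audio start offset, MPEG4Writer shifts both tracks so the earliest
    // one starts at zero, and each cloned video timestamp is then expected at
    // srcTimeUs + (500000 - 250000), i.e. srcTimeUs + 250000.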
checkVideoSamplesTimeStamps(MediaExtractor extractorSrc, boolean hasBFrames, MediaExtractor extractorTest, HashSet<Integer> samplesDropSet, int videoStartOffsetUs)1387 private void checkVideoSamplesTimeStamps(MediaExtractor extractorSrc, boolean hasBFrames, 1388 MediaExtractor extractorTest, HashSet<Integer> samplesDropSet, int videoStartOffsetUs) { 1389 long srcSampleTimeUs = -1; 1390 long testSampleTimeUs = -1; 1391 boolean srcAdvance = false; 1392 boolean testAdvance = false; 1393 int videoSampleCount = 0; 1394 1395 extractorSrc.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1396 extractorTest.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1397 1398 if (VERBOSE) { 1399 Log.v(TAG, "srcTrackIndex:" + extractorSrc.getSampleTrackIndex() + 1400 " testTrackIndex:" + extractorTest.getSampleTrackIndex()); 1401 Log.v(TAG, "videoStartOffsetUs:" + videoStartOffsetUs); 1402 } 1403 1404 do { 1405 ++videoSampleCount; 1406 srcSampleTimeUs = extractorSrc.getSampleTime(); 1407 testSampleTimeUs = extractorTest.getSampleTime(); 1408 if (VERBOSE) { 1409 Log.v(TAG, "videoSampleCount:" + videoSampleCount); 1410 Log.i(TAG, "srcTSus:" + srcSampleTimeUs + " testTSus:" + testSampleTimeUs); 1411 } 1412 if (samplesDropSet == null || !samplesDropSet.contains(videoSampleCount)) { 1413 if (srcSampleTimeUs == -1 || testSampleTimeUs == -1) { 1414 if (VERBOSE) { 1415 Log.v(TAG, "srcUs:" + srcSampleTimeUs + "testUs:" + testSampleTimeUs); 1416 } 1417 fail("either source or test track reached end of stream"); 1418 } 1419 /* Stts values within 0.1ms(100us) difference are fudged to save too many 1420 * stts entries in MPEG4Writer. 1421 */ 1422 else if (Math.abs(srcSampleTimeUs + videoStartOffsetUs - testSampleTimeUs) > 100) { 1423 if (VERBOSE) { 1424 Log.v(TAG, "Fail:video timestamps didn't match"); 1425 Log.v(TAG, 1426 "srcTrackIndex:" + extractorSrc.getSampleTrackIndex() 1427 + " testTrackIndex:" + extractorTest.getSampleTrackIndex()); 1428 Log.v(TAG, "srcTSus:" + srcSampleTimeUs + " testTSus:" + testSampleTimeUs); 1429 } 1430 fail("video timestamps didn't match"); 1431 } 1432 testAdvance = extractorTest.advance(); 1433 } 1434 srcAdvance = extractorSrc.advance(); 1435 } while (srcAdvance && testAdvance); 1436 if (srcAdvance != testAdvance) { 1437 if (VERBOSE) { 1438 Log.v(TAG, "videoSampleCount:" + videoSampleCount); 1439 } 1440 fail("either video track has not reached its last sample"); 1441 } 1442 } 1443 checkAudioSamplesTimestamps(MediaExtractor extractorSrc, MediaExtractor extractorTest, int audioStartOffsetUs)1444 private void checkAudioSamplesTimestamps(MediaExtractor extractorSrc, 1445 MediaExtractor extractorTest, int audioStartOffsetUs) { 1446 long srcSampleTimeUs = -1; 1447 long testSampleTimeUs = -1; 1448 boolean srcAdvance = false; 1449 boolean testAdvance = false; 1450 int audioSampleCount = 0; 1451 1452 extractorSrc.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1453 if (audioStartOffsetUs >= 0) { 1454 // Added edit list support for maintaining only the diff in start offsets of tracks. 1455 // TODO: Remove this once we add support for preserving absolute timestamps as well. 
1456 extractorTest.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1457 } else { 1458 extractorTest.seekTo(audioStartOffsetUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC); 1459 } 1460 if (VERBOSE) { 1461 Log.v(TAG, "audioStartOffsetUs:" + audioStartOffsetUs); 1462 Log.v(TAG, "srcTrackIndex:" + extractorSrc.getSampleTrackIndex() + 1463 " testTrackIndex:" + extractorTest.getSampleTrackIndex()); 1464 } 1465 // Check timestamps of all audio samples. 1466 do { 1467 ++audioSampleCount; 1468 srcSampleTimeUs = extractorSrc.getSampleTime(); 1469 testSampleTimeUs = extractorTest.getSampleTime(); 1470 if (VERBOSE) { 1471 Log.v(TAG, "audioSampleCount:" + audioSampleCount); 1472 Log.v(TAG, "srcTSus:" + srcSampleTimeUs + " testTSus:" + testSampleTimeUs); 1473 } 1474 1475 if (srcSampleTimeUs == -1 || testSampleTimeUs == -1) { 1476 if (VERBOSE) { 1477 Log.v(TAG, "srcTSus:" + srcSampleTimeUs + " testTSus:" + testSampleTimeUs); 1478 } 1479 fail("either source or test track reached end of stream"); 1480 } 1481 // > 1us to ignore any round off errors. 1482 else if (Math.abs(srcSampleTimeUs + audioStartOffsetUs - testSampleTimeUs) > 1) { 1483 fail("audio timestamps didn't match"); 1484 } 1485 testAdvance = extractorTest.advance(); 1486 srcAdvance = extractorSrc.advance(); 1487 } while (srcAdvance && testAdvance); 1488 if (srcAdvance != testAdvance) { 1489 fail("either audio track has not reached its last sample"); 1490 } 1491 } 1492 1493 /** Returns the static HDR metadata in the given {@code file}, or null if not present. */ getVideoColorInfo(String path)1494 private ColorInfo getVideoColorInfo(String path) 1495 throws ExecutionException, InterruptedException { 1496 TrackGroupArray trackGroupArray = 1497 MetadataRetriever.retrieveMetadata(getContext(), MediaItem.fromUri(path)).get(); 1498 for (int i = 0; i < trackGroupArray.length; i++) { 1499 Format format = trackGroupArray.get(i).getFormat(0); 1500 if (format.sampleMimeType.startsWith("video/")) { 1501 return format.colorInfo; 1502 } 1503 } 1504 return null; 1505 } 1506 } 1507