/*
 * Copyright (C) 2013 The Android Open Source Project
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package android.media.codec.cts;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;

import android.graphics.ImageFormat;
import android.media.Image;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.media.cts.InputSurface;
import android.media.cts.InputSurfaceInterface;
import android.media.cts.MediaCodecWrapper;
import android.media.cts.NdkMediaCodec;
import android.media.cts.OutputSurface;
import android.media.cts.SdkMediaCodec;
import android.media.cts.TestArgs;
import android.media.cts.TestUtils;
import android.opengl.GLES20;
import android.os.Build;
import android.platform.test.annotations.PlatinumTest;
import android.platform.test.annotations.Presubmit;
import android.util.Log;

import androidx.test.filters.SmallTest;

import com.android.compatibility.common.util.ApiLevelUtil;
import com.android.compatibility.common.util.ApiTest;
import com.android.compatibility.common.util.MediaUtils;

import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;

import javax.microedition.khronos.opengles.GL10;

/**
 * Generates a series of video frames, encodes them, decodes them, and tests for significant
 * divergence from the original.
 * <p>
 * We copy the data from the encoder's output buffers to the decoder's input buffers, running
 * them in parallel.  The first buffer output for video/avc contains codec configuration data,
 * which we must carefully forward to the decoder.
 * <p>
 * An alternative approach would be to save the output of the encoder as an MPEG-4 video
 * file and read it back in from disk.  The data we're generating is just an elementary
 * stream, so we'd need an additional muxing step to make that happen.
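 * <p>
 * The data flow, in outline (method names as they appear below):
 * <pre>
 *     generateFrame() / generateSurfaceFrame()
 *         --&gt; encoder input (byte[] buffer or input Surface)
 *         --&gt; encoder output (elementary stream; csd-0 first for video/avc)
 *         --&gt; decoder input
 *         --&gt; decoder output (ByteBuffer/Image, or rendered to an OutputSurface)
 *         --&gt; checkFrame() / checkSurfaceFrame()
 * </pre>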
 */
@Presubmit
@SmallTest
@PlatinumTest(focusArea = "media")
@RunWith(Parameterized.class)
public class EncodeDecodeTest {
    private static final String TAG = "EncodeDecodeTest";
    private static final boolean VERBOSE = false;           // lots of logging
    private static final boolean DEBUG_SAVE_FILE = false;   // save copy of encoded movie
    private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test.";
    private static final boolean IS_AFTER_T = ApiLevelUtil.isAfter(Build.VERSION_CODES.TIRAMISU);

    // parameters for the encoder
    private static final int FRAME_RATE = 15;               // 15fps
    private static final int IFRAME_INTERVAL = 10;          // 10 seconds between I-frames

    // movie length, in frames
    private static final int NUM_FRAMES = 30;               // two seconds of video

    private static final int TEST_Y = 120;                  // YUV values for colored rect
    private static final int TEST_U = 160;
    private static final int TEST_V = 200;
    private static final int TEST_R0 = 0;                   // RGB equivalent of {0,0,0} (BT.601)
    private static final int TEST_G0 = 136;
    private static final int TEST_B0 = 0;
    private static final int TEST_R1 = 236;                 // RGB equivalent of {120,160,200} (BT.601)
    private static final int TEST_G1 = 50;
    private static final int TEST_B1 = 186;
    private static final int TEST_R0_BT709 = 0;             // RGB equivalent of {0,0,0} (BT.709)
    private static final int TEST_G0_BT709 = 77;
    private static final int TEST_B0_BT709 = 0;
    private static final int TEST_R1_BT709 = 250;           // RGB equivalent of {120,160,200} (BT.709)
    private static final int TEST_G1_BT709 = 76;
    private static final int TEST_B1_BT709 = 189;
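    // A sketch of where those RGB constants come from, using the standard limited-range
    // YUV->RGB matrices (approximate coefficients; results are rounded and clamped to [0,255]):
    //   BT.601:  R = 1.164*(Y-16) + 1.596*(V-128)
    //            G = 1.164*(Y-16) - 0.813*(V-128) - 0.391*(U-128)
    //            B = 1.164*(Y-16) + 2.018*(U-128)
    //   e.g. {Y,U,V} = {120,160,200}:
    //            R = 1.164*104 + 1.596*72             = 236
    //            G = 1.164*104 - 0.813*72 - 0.391*32  =  50
    //            B = 1.164*104 + 2.018*32             = 186
    //   and {0,0,0} clamps to roughly {0,136,0}.  BT.709 uses 1.793/0.533/0.213/2.112 in
    //   place of 1.596/0.813/0.391/2.018, mapping {120,160,200} to {250,76,189}.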
    private static final boolean USE_NDK = true;

    // component names
    private final String mEncoderName;
    private final String mDecoderName;
    // mime
    private final String mMimeType;
    // size of a frame, in pixels
    private final int mWidth;
    private final int mHeight;
    // bit rate, in bits per second
    private final int mBitRate;
    // validate YUV->RGB decoded frames against BT.601 and/or BT.709
    private boolean mAllowBT601 = true;
    private boolean mAllowBT709 = false;

    // largest color component delta seen (i.e. actual vs. expected)
    private int mLargestColorDelta;

    static private List<Object[]> prepareParamList(List<Object[]> exhaustiveArgsList) {
        final List<Object[]> argsList = new ArrayList<>();
        int argLength = exhaustiveArgsList.get(0).length;
        MediaCodecList mcl = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (Object[] arg : exhaustiveArgsList) {
            String mediaType = (String)arg[0];
            if (TestArgs.shouldSkipMediaType(mediaType)) {
                continue;
            }

            MediaFormat format = MediaFormat.createVideoFormat(mediaType, (Integer)arg[1],
                    (Integer)arg[2]);

            String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
            String[] decoderNames = MediaUtils.getDecoderNamesForMime(mediaType);
            // The first decoder/encoder pair that supports the given format is chosen.
            outerLoop:
            for (String decoder : decoderNames) {
                if (TestArgs.shouldSkipCodec(decoder)) {
                    continue;
                }
                for (String encoder : encoderNames) {
                    if (TestArgs.shouldSkipCodec(encoder)) {
                        continue;
                    }
                    if (MediaUtils.supports(encoder, format) &&
                            MediaUtils.supports(decoder, format)) {
                        Object[] testArgs = new Object[argLength + 2];
                        testArgs[0] = encoder;
                        testArgs[1] = decoder;
                        System.arraycopy(arg, 0, testArgs, 2, argLength);
                        argsList.add(testArgs);
                        // Test only the first codec pair that supports the given format.
                        // Remove the following break statement to test all codecs on the device.
                        break outerLoop;
                    }
                }
            }
        }
        return argsList;
    }

    @Before
    public void shouldSkip() {
        MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        assumeTrue(MediaUtils.supports(mEncoderName, format));
        assumeTrue(MediaUtils.supports(mDecoderName, format));
    }

    @Parameterized.Parameters(name = "{index}_{0}_{1}")
    public static Collection<Object[]> input() {
        final List<Object[]> exhaustiveArgsList = Arrays.asList(new Object[][]{
                // Mime, width, height, bit-rate, allow bt601, allow bt709
                {MediaFormat.MIMETYPE_VIDEO_AVC, 176, 144, 1000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_AVC, 320, 240, 2000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720, 6000000, true, true},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 176, 144, 1000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 320, 240, 2000000, true, false},
                {MediaFormat.MIMETYPE_VIDEO_VP8, 1280, 720, 6000000, true, true},
        });
        return prepareParamList(exhaustiveArgsList);
    }

    public EncodeDecodeTest(String encoder, String decoder, String mimeType, int width, int height,
            int bitRate, boolean allowBT601, boolean allowBT709) {
        if ((width % 16) != 0 || (height % 16) != 0) {
            Log.w(TAG, "WARNING: width or height not multiple of 16");
        }
        mEncoderName = encoder;
        mDecoderName = decoder;
        mMimeType = mimeType;
        mWidth = width;
        mHeight = height;
        mBitRate = bitRate;
        mAllowBT601 = allowBT601;
        mAllowBT709 = allowBT709;
    }

    /** Wraps testEncodeDecodeVideoFromBuffer(true) */
    private static class BufferToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;

        private BufferToSurfaceWrapper(EncodeDecodeTest test) {
            mTest = test;
        }

        @Override
        public void run() {
            try {
                mTest.encodeDecodeVideoFromBuffer(true);
            } catch (Throwable th) {
                mThrowable = th;
            }
        }

        /**
         * Entry point.  Runs the test on a dedicated thread; any Throwable the test raised
         * is captured and rethrown on the caller's thread.
         */
        public static void runTest(EncodeDecodeTest obj) throws Throwable {
            BufferToSurfaceWrapper wrapper = new BufferToSurfaceWrapper(obj);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /** Wraps testEncodeDecodeVideoFromSurfaceToSurface() */
    private static class SurfaceToSurfaceWrapper implements Runnable {
        private Throwable mThrowable;
        private EncodeDecodeTest mTest;
        private boolean mUsePersistentInput;
        private boolean mUseNdk;

        private SurfaceToSurfaceWrapper(EncodeDecodeTest test, boolean persistent, boolean useNdk) {
            mTest = test;
            mUsePersistentInput = persistent;
            mUseNdk = useNdk;
        }

        @Override
        public void run() {
            InputSurfaceInterface inputSurface = null;
            try {
                if (!mUsePersistentInput) {
                    mTest.encodeDecodeVideoFromSurfaceToSurface(null, mUseNdk);
                } else {
                    Log.d(TAG, "creating persistent surface");
                    if (mUseNdk) {
                        inputSurface = NdkMediaCodec.createPersistentInputSurface();
                    } else {
                        inputSurface = new InputSurface(MediaCodec.createPersistentInputSurface());
                    }

                    for (int i = 0; i < 3; i++) {
                        Log.d(TAG, "test persistent surface - round " + i);
                        mTest.encodeDecodeVideoFromSurfaceToSurface(inputSurface, mUseNdk);
                    }
                }
            } catch (Throwable th) {
                mThrowable = th;
            } finally {
                if (inputSurface != null) {
                    inputSurface.release();
                }
            }
        }

        /**
         * Entry point.  Runs the test on a dedicated thread; any Throwable the test raised
         * is captured and rethrown on the caller's thread.
         */
        public static void runTest(EncodeDecodeTest obj, boolean persistent, boolean useNdk)
                throws Throwable {
            // A few cuttlefish-specific color conversion issues were fixed after Android T.
            if (MediaUtils.onCuttlefish()) {
                assumeTrue("Color conversion related tests are not valid on cuttlefish releases "
                        + "through android T", IS_AFTER_T);
            }
            // Before Android U, this test only checked the first codec (usually a hardware
            // codec), and software codecs exercised a problem in the underlying graphics
            // code.  So we only run this in CTS mode or on versions after Android T
            // (where the graphics code is fixed).
            if (TestUtils.isMtsMode()) {
                assumeTrue("Color conversion related tests are skipped in MTS on releases "
                        + "through android T", IS_AFTER_T);
            }

            SurfaceToSurfaceWrapper wrapper =
                    new SurfaceToSurfaceWrapper(obj, persistent, useNdk);
            Thread th = new Thread(wrapper, "codec test");
            th.start();
            th.join();
            if (wrapper.mThrowable != null) {
                throw wrapper.mThrowable;
            }
        }
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated into a buffer.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     * <p>
     * See http://b.android.com/37769 for a discussion of input format pitfalls.
     */
    private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
        MediaCodec encoder = null;
        MediaCodec decoder = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.
            encoder = MediaCodec.createByCodecName(mEncoderName);

            int colorFormat = selectColorFormat(encoder.getCodecInfo(), mMimeType);
            if (VERBOSE) Log.d(TAG, "found colorFormat: " + colorFormat);

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "format: " + format);

            encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            encoder.start();

            // Create a MediaCodec for the decoder, just based on the MIME type.  The various
            // format details will be passed through the csd-0 meta-data later on.
            decoder = MediaCodec.createByCodecName(mDecoderName);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());

            doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Tests encoding and subsequently decoding video from frames generated onto a Surface.
     * <p>
     * We encode several frames of a video test pattern using MediaCodec, then decode the
     * output with MediaCodec and do some simple checks.
     */
    private void encodeDecodeVideoFromSurfaceToSurface(InputSurfaceInterface inSurf,
            boolean useNdk) throws Exception {
        MediaCodecWrapper encoder = null;
        MediaCodec decoder = null;
        InputSurfaceInterface inputSurface = inSurf;
        OutputSurface outputSurface = null;

        mLargestColorDelta = -1;

        try {
            // We avoid the device-specific limitations on width and height by using values that
            // are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat format = MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);

            int colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface;

            // Set some properties.  Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
            format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
            format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);

            // Set color parameters
            format.setInteger(MediaFormat.KEY_COLOR_RANGE, MediaFormat.COLOR_RANGE_LIMITED);
            format.setInteger(MediaFormat.KEY_COLOR_STANDARD, MediaFormat.COLOR_STANDARD_BT601_PAL);
            format.setInteger(MediaFormat.KEY_COLOR_TRANSFER, MediaFormat.COLOR_TRANSFER_SDR_VIDEO);
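            // Pinning limited-range BT.601 keeps the encoder and decoder agreed on color
            // aspects (the same format is passed to both configure() calls below), so the
            // decoded surface frames should line up with the BT.601 RGB constants above;
            // devices that convert via BT.709 instead are tolerated where mAllowBT709 is set.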

            if (VERBOSE) Log.d(TAG, "format: " + format);

            // Create the output surface.
            outputSurface = new OutputSurface(mWidth, mHeight);

            decoder = MediaCodec.createByCodecName(mDecoderName);
            if (VERBOSE) Log.d(TAG, "got decoder: " + decoder.getName());
            decoder.configure(format, outputSurface.getSurface(), null, 0);
            decoder.start();

            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties.  Request a Surface to use for input.
            if (useNdk) {
                encoder = new NdkMediaCodec(mEncoderName);
            } else {
                encoder = new SdkMediaCodec(MediaCodec.createByCodecName(mEncoderName));
            }
            encoder.configure(format, MediaCodec.CONFIGURE_FLAG_ENCODE);
            if (inSurf != null) {
                Log.d(TAG, "using persistent surface");
                encoder.setInputSurface(inputSurface);
                inputSurface.updateSize(mWidth, mHeight);
            } else {
                inputSurface = encoder.createInputSurface();
            }
            encoder.start();

            doEncodeDecodeVideoFromSurfaceToSurface(encoder, inputSurface, decoder, outputSurface);
        } finally {
            if (VERBOSE) Log.d(TAG, "releasing codecs");
            if (inSurf == null && inputSurface != null) {
                inputSurface.release();
            }
            if (outputSurface != null) {
                outputSurface.release();
            }
            if (encoder != null) {
                encoder.stop();
                encoder.release();
            }
            if (decoder != null) {
                decoder.stop();
                decoder.release();
            }

            Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
        }
    }

    /**
     * Returns a color format that is supported by the codec and by this test code.  If no
     * match is found, this throws a test failure -- the set of formats known to the test
     * should be expanded for new platforms.
     */
    private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
        MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
        for (int i = 0; i < capabilities.colorFormats.length; i++) {
            int colorFormat = capabilities.colorFormats[i];
            if (isRecognizedFormat(colorFormat)) {
                return colorFormat;
            }
        }
        fail("couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
        return 0;   // not reached
    }

    /**
     * Returns true if this is a color format that this test code understands (i.e. we know how
     * to read and generate frames in this format).
     */
    private static boolean isRecognizedFormat(int colorFormat) {
        switch (colorFormat) {
            // these are the formats we know how to handle for this test
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                return false;
        }
    }

    /**
     * Returns true if the specified color format is semi-planar YUV.  Throws an exception
     * if the color format is not recognized (e.g. not YUV).
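     * <p>
     * A sketch of the two 4:2:0 layouts this test handles, for a W x H frame:
     * <pre>
     *   planar:      Y plane (W*H bytes), U plane (W/2 * H/2), V plane (W/2 * H/2)
     *   semi-planar: Y plane (W*H bytes), interleaved UVUV... (W/2 * H/2 pairs)
     * </pre>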
     */
    private static boolean isSemiPlanarYUV(int colorFormat) {
        switch (colorFormat) {
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
                return false;
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
                return true;
            default:
                throw new RuntimeException("unknown format " + colorFormat);
        }
    }

    /**
     * Does the actual work for encoding frames from buffers of byte[].
     */
    private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
            MediaCodec decoder, boolean toSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = null;
        ByteBuffer[] decoderOutputBuffers = null;
        MediaCodec.BufferInfo decoderInfo = new MediaCodec.BufferInfo();
        MediaCodec.BufferInfo encoderInfo = new MediaCodec.BufferInfo();
        MediaFormat decoderOutputFormat = null;
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;
        boolean decoderConfigured = false;
        OutputSurface outputSurface = null;

        // The size of a frame of video data, in the formats we handle, is stride*sliceHeight
        // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Application
        // of algebra and assuming that stride==width and sliceHeight==height yields:
        byte[] frameData = new byte[mWidth * mHeight * 3 / 2];
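        // For example, a 320x240 frame is 320*240 + 2*(160*120) = 320*240*3/2 = 115200 bytes.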

        // Just out of curiosity.
        long rawSize = 0;
        long encodedSize = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        if (toSurface) {
            outputSurface = new OutputSurface(mWidth, mHeight);
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
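        // A non-negative encoderStatus persists across loop iterations: it marks an encoder
        // output buffer that has been dequeued but not yet handed to the decoder (because no
        // decoder input buffer was free), so the next pass retries the same buffer.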
        int encoderStatus = -1;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  By
            // doing this on every loop we're working to ensure that the encoder always has
            // work to do.
            //
            // We don't really want a timeout here, but sometimes there's a delay opening
            // the encoder device, so a short timeout can keep us from spinning hard.
            if (!inputDone) {
                int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
                if (VERBOSE) Log.d(TAG, "inputBufIndex=" + inputBufIndex);
                if (inputBufIndex >= 0) {
                    long ptsUsec = computePresentationTime(generateIndex);
                    if (generateIndex == NUM_FRAMES) {
                        // Send an empty frame with the end-of-stream flag set.  If we set EOS
                        // on a frame with data, that frame data will be ignored, and the
                        // output will be short one frame.
                        encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        inputDone = true;
                        if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                    } else {
                        generateFrame(generateIndex, encoderColorFormat, frameData);

                        ByteBuffer inputBuf = encoder.getInputBuffer(inputBufIndex);
                        // the buffer should be sized to hold one full frame
                        assertTrue(inputBuf.capacity() >= frameData.length);
                        inputBuf.clear();
                        inputBuf.put(frameData);

                        encoder.queueInputBuffer(inputBufIndex, 0, frameData.length, ptsUsec, 0);
                        if (VERBOSE) Log.d(TAG, "submitted frame " + generateIndex + " to enc");
                    }
                    generateIndex++;
                } else {
                    // either all in use, or we timed out during initial setup
                    if (VERBOSE) Log.d(TAG, "input buffer not available");
                }
            }

            // Check for output from the encoder.  If there's no output yet, we either need to
            // provide more input, or we need to wait for the encoder to work its magic.  We
            // can't actually tell which is the case, so if we can't get an output buffer right
            // away we loop around and see if it wants more input.
            //
            // Once we get EOS from the encoder, we don't need to do this anymore.
            if (!encoderDone) {
                MediaCodec.BufferInfo info = encoderInfo;
                if (encoderStatus < 0) {
                    encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                }
                if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from encoder available");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // not expected for an encoder
                    encoderOutputBuffers = encoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected on API 18+
                    MediaFormat newFormat = encoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                } else if (encoderStatus < 0) {
                    fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                } else { // encoderStatus >= 0
                    ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                    if (encodedData == null) {
                        fail("encoderOutputBuffer " + encoderStatus + " was null");
                    }

                    // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);

                    boolean releaseBuffer = false;
                    if (!decoderConfigured) {
                        // Codec config info.  Only expected on first packet.  One way to
                        // handle this is to manually stuff the data into the MediaFormat
                        // and pass that to configure().  We do that here to exercise the API.
                        // For codecs that don't have codec config data (such as VP8),
                        // initialize the decoder before trying to decode the first packet.
                        assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 ||
                                   mMimeType.equals(MediaFormat.MIMETYPE_VIDEO_VP8));
                        MediaFormat format =
                                MediaFormat.createVideoFormat(mMimeType, mWidth, mHeight);
                        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                            format.setByteBuffer("csd-0", encodedData);
                        }
                        decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                                null, 0);
                        decoder.start();
                        decoderInputBuffers = decoder.getInputBuffers();
                        decoderOutputBuffers = decoder.getOutputBuffers();
                        decoderConfigured = true;
                        if (VERBOSE) Log.d(TAG, "decoder configured (" + info.size + " bytes)");
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                        // Get a decoder input buffer
                        assertTrue(decoderConfigured);
                        int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
                        if (inputBufIndex >= 0) {
                            ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                            inputBuf.clear();
                            inputBuf.put(encodedData);
                            decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                    info.presentationTimeUs, info.flags);

                            encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                            if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                    + (encoderDone ? " (EOS)" : ""));
                            releaseBuffer = true;
                        }
                    } else {
                        releaseBuffer = true;
                    }
                    if (releaseBuffer) {
                        encodedSize += info.size;
                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.position(info.offset);
                            encodedData.get(data);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }
                        encoder.releaseOutputBuffer(encoderStatus, false);
                        encoderStatus = -1;
                    }
                }
            }

            // Check for output from the decoder.  We want to do this on every loop to avoid
            // the possibility of stalling the pipeline.  We use a short timeout to avoid
            // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
            //
            // If we're decoding to a Surface, we'll get notified here as usual but the
            // ByteBuffer references will be null.  The data is sent to Surface instead.
            if (decoderConfigured) {
                MediaCodec.BufferInfo info = decoderInfo;
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The storage associated with the direct ByteBuffer may already be unmapped,
                    // so attempting to access data through the old output buffer array could
                    // lead to a native crash.
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
                    decoderOutputBuffers = decoder.getOutputBuffers();
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (!toSurface) {
                        ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];
                        Image outputImage = (checkIndex % 2 == 0)
                                ? null : decoder.getOutputImage(decoderStatus);

                        outputFrame.position(info.offset);
                        outputFrame.limit(info.offset + info.size);

                        rawSize += info.size;
                        if (info.size == 0) {
                            if (VERBOSE) Log.d(TAG, "got empty frame");
                        } else {
                            if (VERBOSE) Log.d(TAG, "decoded, checking frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame,
                                    outputImage)) {
                                badFrames++;
                            }
                        }
                        if (outputImage != null) {
                            outputImage.close();
                        }

                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }
                        decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                    } else {
                        if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                                " (size=" + info.size + ")");
                        rawSize += info.size;
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            if (VERBOSE) Log.d(TAG, "output EOS");
                            outputDone = true;
                        }

                        boolean doRender = (info.size != 0);

                        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                        // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                        // that the texture will be available before the call returns, so we
                        // need to wait for the onFrameAvailable callback to fire.
                        decoder.releaseOutputBuffer(decoderStatus, doRender);
                        if (doRender) {
                            if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                            assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                    info.presentationTimeUs);
                            outputSurface.awaitNewImage();
                            outputSurface.drawImage();
                            if (!checkSurfaceFrame(checkIndex++)) {
                                badFrames++;
                            }
                        }
                    }
                }
            }
        }

        if (VERBOSE) Log.d(TAG, "decoded " + checkIndex + " frames at "
                + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);
        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (outputSurface != null) {
            outputSurface.release();
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Does the actual work for encoding and decoding from Surface to Surface.
     */
    private void doEncodeDecodeVideoFromSurfaceToSurface(MediaCodecWrapper encoder,
            InputSurfaceInterface inputSurface, MediaCodec decoder,
            OutputSurface outputSurface) {
        final int TIMEOUT_USEC = 10000;
        ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
        ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        int generateIndex = 0;
        int checkIndex = 0;
        int badFrames = 0;

        // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
        // stream, not a .mp4 file, so not all players will know what to do with it.
        FileOutputStream outputStream = null;
        if (DEBUG_SAVE_FILE) {
            String fileName = DEBUG_FILE_NAME_BASE + mWidth + "x" + mHeight + ".mp4";
            try {
                outputStream = new FileOutputStream(fileName);
                Log.d(TAG, "encoded output will be saved as " + fileName);
            } catch (IOException ioe) {
                Log.w(TAG, "Unable to create debug output file " + fileName);
                throw new RuntimeException(ioe);
            }
        }

        // Loop until the output side is done.
        boolean inputDone = false;
        boolean encoderDone = false;
        boolean outputDone = false;
        while (!outputDone) {
            if (VERBOSE) Log.d(TAG, "loop");

            // If we're not done submitting frames, generate a new one and submit it.  The
            // eglSwapBuffers call will block if the input is full.
            if (!inputDone) {
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.
                    if (VERBOSE) Log.d(TAG, "signaling input EOS");
                    encoder.signalEndOfInputStream();
                    inputDone = true;
                } else {
                    inputSurface.makeCurrent();
                    generateSurfaceFrame(generateIndex);
                    inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                    if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                    inputSurface.swapBuffers();
                }
                generateIndex++;
            }

            // Assume output is available.  Loop until both assumptions are false.
            boolean decoderOutputAvailable = true;
            boolean encoderOutputAvailable = !encoderDone;
            while (decoderOutputAvailable || encoderOutputAvailable) {
                // Start by draining any pending output from the decoder.  It's important to
                // do this before we try to stuff any more data in.
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (but we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // this happens before the first frame is returned
                    MediaFormat decoderOutputFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " +
                            decoderOutputFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else {  // decoderStatus >= 0
                    if (VERBOSE) Log.d(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        if (VERBOSE) Log.d(TAG, "output EOS");
                        outputDone = true;
                    }

                    // The ByteBuffers are null references, but we still get a nonzero size for
                    // the decoded data.
                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.  If we don't
                    // wait, we risk dropping frames.
                    outputSurface.makeCurrent();
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        assertEquals("Wrong time stamp", computePresentationTime(checkIndex),
                                info.presentationTimeUs);
                        if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
                if (decoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // Continue attempts to drain output.
                    continue;
                }

                // Decoder is drained, check to see if we've got a new buffer of output from
                // the encoder.
                if (!encoderDone) {
                    int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                        // no output available yet
                        if (VERBOSE) Log.d(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                        // not expected for an encoder
                        encoderOutputBuffers = encoder.getOutputBuffers();
                        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
                    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                        // expected on API 18+
                        String newFormat = encoder.getOutputFormatString();
                        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
                    } else if (encoderStatus < 0) {
                        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    } else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoder.getOutputBuffer(encoderStatus);
                        if (encodedData == null) {
                            fail("encoderOutputBuffer " + encoderStatus + " was null");
                        }

                        // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                        encodedData.position(info.offset);
                        encodedData.limit(info.offset + info.size);

                        if (outputStream != null) {
                            byte[] data = new byte[info.size];
                            encodedData.get(data);
                            encodedData.position(info.offset);
                            try {
                                outputStream.write(data);
                            } catch (IOException ioe) {
                                Log.w(TAG, "failed writing debug data to file");
                                throw new RuntimeException(ioe);
                            }
                        }

                        // Get a decoder input buffer, blocking until it's available.  We just
                        // drained the decoder output, so we expect there to be a free input
                        // buffer now or in the near future (i.e. this should never deadlock
                        // if the codec is meeting requirements).
                        //
                        // The first buffer of data we get will have the BUFFER_FLAG_CODEC_CONFIG
                        // flag set; the decoder will see this and finish configuring itself.
                        int inputBufIndex = decoder.dequeueInputBuffer(-1);
                        ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                        inputBuf.clear();
                        inputBuf.put(encodedData);
                        decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                                info.presentationTimeUs, info.flags);

                        // If everything from the encoder has been passed to the decoder, we
                        // can stop polling the encoder output.  (This is just an optimization.)
                        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                            encoderDone = true;
                            encoderOutputAvailable = false;
                        }
                        if (VERBOSE) Log.d(TAG, "passed " + info.size + " bytes to decoder"
                                + (encoderDone ? " (EOS)" : ""));

                        encoder.releaseOutputBuffer(encoderStatus, false);
                    }
                }
            }
        }

        if (outputStream != null) {
            try {
                outputStream.close();
            } catch (IOException ioe) {
                Log.w(TAG, "failed closing debug file");
                throw new RuntimeException(ioe);
            }
        }

        if (checkIndex != NUM_FRAMES) {
            fail("expected " + NUM_FRAMES + " frames, only decoded " + checkIndex);
        }
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    }

    /**
     * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
     * sequence that wraps around.  It looks like this:
     * <pre>
     *   0 1 2 3
     *   7 6 5 4
     * </pre>
     * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
     */
    private void generateFrame(int frameIndex, int colorFormat, byte[] frameData) {
        final int HALF_WIDTH = mWidth / 2;
        boolean semiPlanar = isSemiPlanarYUV(colorFormat);

        // Set to zero.  In YUV this is a dull green.
        Arrays.fill(frameData, (byte) 0);

        int startX, startY;

        frameIndex %= 8;
        //frameIndex = (frameIndex / 8) % 8;    // use this instead for debug -- easier to see
        if (frameIndex < 4) {
            startX = frameIndex * (mWidth / 4);
            startY = 0;
        } else {
            startX = (7 - frameIndex) * (mWidth / 4);
            startY = mHeight / 2;
        }
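        // For example, frameIndex 5 yields startX = (7-5) * (mWidth/4) = mWidth/2 and
        // startY = mHeight/2: the third rectangle from the left in the bottom row above.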

        for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
            for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
                if (semiPlanar) {
                    // full-size Y, followed by UV pairs at half resolution
                    // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                    // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                    //        OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
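                        // x and y are both even here, so y * HALF_WIDTH == (y/2) * mWidth (the
                        // chroma row offset) and x is the byte offset of the U of UV pair x/2.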
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                        frameData[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                    }
                } else {
                    // full-size Y, followed by quarter-size U and quarter-size V
                    // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                    // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
                    frameData[y * mWidth + x] = (byte) TEST_Y;
                    if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                        frameData[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                        frameData[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                                  (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                    }
                }
            }
        }
    }

    /**
     * Performs a simple check to see if the frame is more or less right.
     * <p>
     * See {@link #generateFrame} for a description of the layout.  The idea is to sample
     * one pixel from the middle of the 8 regions, and verify that the correct one has
     * the non-background color.  We can't know exactly what the video encoder has done
     * with our frames, so we just check to see if it looks like more or less the right thing.
     *
     * @return true if the frame looks good
     */
checkFrame(int frameIndex, MediaFormat format, ByteBuffer frameData, Image image)1060     private boolean checkFrame(int frameIndex, MediaFormat format, ByteBuffer frameData, Image image) {
1061         // Check for color formats we don't understand.  There is no requirement for video
1062         // decoders to use a "mundane" format, so we just give a pass on proprietary formats.
1063         // e.g. Nexus 4 0x7FA30C03 OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka
1064         int colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
1065         if (!isRecognizedFormat(colorFormat)) {
1066             Log.d(TAG, "unable to check frame contents for colorFormat=" +
1067                     Integer.toHexString(colorFormat));
1068             return true;
1069         }
1070 
1071         boolean frameFailed = false;
1072         boolean semiPlanar = isSemiPlanarYUV(colorFormat);
1073         int width = format.getInteger(MediaFormat.KEY_STRIDE,
1074                 format.getInteger(MediaFormat.KEY_WIDTH));
1075         int height = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT,
1076                 format.getInteger(MediaFormat.KEY_HEIGHT));
1077         int halfWidth = width / 2;
1078         int cropLeft = format.getInteger("crop-left");
1079         int cropRight = format.getInteger("crop-right");
1080         int cropTop = format.getInteger("crop-top");
1081         int cropBottom = format.getInteger("crop-bottom");
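        // The MediaFormat crop keys are inclusive coordinates, while Image.getCropRect()
        // returns an android.graphics.Rect whose right/bottom edges are exclusive --
        // hence the -1 below.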
1082         if (image != null) {
1083             cropLeft = image.getCropRect().left;
1084             cropRight = image.getCropRect().right - 1;
1085             cropTop = image.getCropRect().top;
1086             cropBottom = image.getCropRect().bottom - 1;
1087         }
1088         int cropWidth = cropRight - cropLeft + 1;
1089         int cropHeight = cropBottom - cropTop + 1;
1090 
1091         assertEquals(mWidth, cropWidth);
1092         assertEquals(mHeight, cropHeight);
1093 
1094         for (int i = 0; i < 8; i++) {
1095             int x, y;
1096             if (i < 4) {
1097                 x = i * (mWidth / 4) + (mWidth / 8);
1098                 y = mHeight / 4;
1099             } else {
1100                 x = (7 - i) * (mWidth / 4) + (mWidth / 8);
1101                 y = (mHeight * 3) / 4;
1102             }
1103 
1104             y += cropTop;
1105             x += cropLeft;
1106 
1107             int testY, testU, testV;
1108             if (image != null) {
1109                 Image.Plane[] planes = image.getPlanes();
1110                 if (planes.length == 3 && image.getFormat() == ImageFormat.YUV_420_888) {
1111                     testY = planes[0].getBuffer().get(y * planes[0].getRowStride() + x * planes[0].getPixelStride()) & 0xff;
1112                     testU = planes[1].getBuffer().get((y/2) * planes[1].getRowStride() + (x/2) * planes[1].getPixelStride()) & 0xff;
1113                     testV = planes[2].getBuffer().get((y/2) * planes[2].getRowStride() + (x/2) * planes[2].getPixelStride()) & 0xff;
1114                 } else {
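                    // Unexpected plane count or format; the zeros here will surface as
                    // a "Bad frame" warning in the color comparison below.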
1115                     testY = testU = testV = 0;
1116                 }
1117             } else {
1118                 int off = frameData.position();
1119                 if (semiPlanar) {
1120                     // Galaxy Nexus uses OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
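                    // 2*(y/2) and 2*(x/2) round the sample point down to even coordinates,
                    // keeping the reads aligned to interleaved UV pairs.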
1121                     testY = frameData.get(off + y * width + x) & 0xff;
1122                     testU = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2)) & 0xff;
1123                     testV = frameData.get(off + width*height + 2*(y/2) * halfWidth + 2*(x/2) + 1) & 0xff;
1124                 } else {
1125                     // Nexus 10, Nexus 7 use COLOR_FormatYUV420Planar
1126                     testY = frameData.get(off + y * width + x) & 0xff;
1127                     testU = frameData.get(off + width*height + (y/2) * halfWidth + (x/2)) & 0xff;
1128                     testV = frameData.get(off + width*height + halfWidth * (height / 2) +
1129                             (y/2) * halfWidth + (x/2)) & 0xff;
1130                 }
1131             }
1132 
1133             int expY, expU, expV;
1134             if (i == frameIndex % 8) {
1135                 // colored rect
1136                 expY = TEST_Y;
1137                 expU = TEST_U;
1138                 expV = TEST_V;
1139             } else {
1140                 // should be our zeroed-out buffer
1141                 expY = expU = expV = 0;
1142             }
1143             if (!isColorClose(testY, expY) ||
1144                     !isColorClose(testU, expU) ||
1145                     !isColorClose(testV, expV)) {
1146                 Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": yuv=" + testY +
1147                         "," + testU + "," + testV + " vs. expected " + expY + "," + expU +
1148                         "," + expV + ")");
1149                 frameFailed = true;
1150             }
1151         }
1152 
1153         return !frameFailed;
1154     }
1155 
1156     /**
1157      * Generates a frame of data using GL commands.
1158      */
1159     private void generateSurfaceFrame(int frameIndex) {
1160         frameIndex %= 8;
1161 
1162         int startX, startY;
1163         if (frameIndex < 4) {
1164             // (0,0) is bottom-left in GL
1165             startX = frameIndex * (mWidth / 4);
1166             startY = mHeight / 2;
1167         } else {
1168             startX = (7 - frameIndex) * (mWidth / 4);
1169             startY = 0;
1170         }
1171 
1172         GLES20.glDisable(GLES20.GL_SCISSOR_TEST);
1173         GLES20.glClearColor(TEST_R0 / 255.0f, TEST_G0 / 255.0f, TEST_B0 / 255.0f, 1.0f);
1174         GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
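        // With the scissor test enabled, the second clear only touches the scissor
        // rect, so the moving block is drawn without submitting any geometry.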
1175         GLES20.glEnable(GLES20.GL_SCISSOR_TEST);
1176         GLES20.glScissor(startX, startY, mWidth / 4, mHeight / 2);
1177         GLES20.glClearColor(TEST_R1 / 255.0f, TEST_G1 / 255.0f, TEST_B1 / 255.0f, 1.0f);
1178         GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
1179     }
1180 
1181     /**
1182      * Checks the frame for correctness.  Similar to {@link #checkFrame}, but uses GL to
1183      * read pixels from the current surface.
1184      *
1185      * @return true if the frame looks good
1186      */
1187     private boolean checkSurfaceFrame(int frameIndex) {
1188         ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
1189         boolean frameFailed = false;
1190 
1191         for (int i = 0; i < 8; i++) {
1192             // Note the coordinates are inverted on the Y-axis in GL.
1193             int x, y;
1194             if (i < 4) {
1195                 x = i * (mWidth / 4) + (mWidth / 8);
1196                 y = (mHeight * 3) / 4;
1197             } else {
1198                 x = (7 - i) * (mWidth / 4) + (mWidth / 8);
1199                 y = mHeight / 4;
1200             }
1201 
1202             GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
1203             int r = pixelBuf.get(0) & 0xff;
1204             int g = pixelBuf.get(1) & 0xff;
1205             int b = pixelBuf.get(2) & 0xff;
1206             //Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);
1207 
1208             int expR, expG, expB, expR_bt709, expG_bt709, expB_bt709;
1209             if (i == frameIndex % 8) {
1210                 // colored rect
1211                 expR = TEST_R1;
1212                 expG = TEST_G1;
1213                 expB = TEST_B1;
1214                 expR_bt709 = TEST_R1_BT709;
1215                 expG_bt709 = TEST_G1_BT709;
1216                 expB_bt709 = TEST_B1_BT709;
1217             } else {
1218                 // zero background color
1219                 expR = TEST_R0;
1220                 expG = TEST_G0;
1221                 expB = TEST_B0;
1222                 expR_bt709 = TEST_R0_BT709;
1223                 expG_bt709 = TEST_G0_BT709;
1224                 expB_bt709 = TEST_B0_BT709;
1225             }
1226 
1227             // Some decoders use BT.709 when converting HD (i.e. >= 720p)
1228             // frames from YUV to RGB, so check against both BT.601 and BT.709
1229             if (mAllowBT601 &&
1230                     isColorClose(r, expR) &&
1231                     isColorClose(g, expG) &&
1232                     isColorClose(b, expB)) {
1233                 // frame OK on BT.601
1234                 mAllowBT709 = false;
1235             } else if (mAllowBT709 &&
1236                            isColorClose(r, expR_bt709) &&
1237                            isColorClose(g, expG_bt709) &&
1238                            isColorClose(b, expB_bt709)) {
1239                 // frame OK on BT.709
1240                 mAllowBT601 = false;
1241             } else {
1242                 Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + " @ " + x + " " + y + ": rgb=" + r +
1243                         "," + g + "," + b + " vs. expected " + expR + "," + expG +
1244                         "," + expB + ")");
1245                 frameFailed = true;
1246             }
1247         }
1248 
1249         return !frameFailed;
1250     }
1251 
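    // Illustrative sketch, not part of the original test: one common full-range
    // BT.601 YUV-to-RGB mapping.  Which matrix (and range) a decoder applies varies,
    // which is why checkSurfaceFrame() accepts either a BT.601 or a BT.709 result.
    @SuppressWarnings("unused")
    private static int[] yuvToRgbBt601FullRange(int y, int u, int v) {
        int r = clampByte((int) Math.round(y + 1.402 * (v - 128)));
        int g = clampByte((int) Math.round(y - 0.344136 * (u - 128) - 0.714136 * (v - 128)));
        int b = clampByte((int) Math.round(y + 1.772 * (u - 128)));
        return new int[] {r, g, b};
    }

    private static int clampByte(int value) {
        return Math.max(0, Math.min(255, value));
    }
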
1252     /**
1253      * Returns true if the actual color value is close to the expected color value.  Updates
1254      * mLargestColorDelta.
1255      */
1256     boolean isColorClose(int actual, int expected) {
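        // Encoding is lossy and YUV<->RGB conversion rounds, so allow some slop.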
1257         final int MAX_DELTA = 8;
1258         int delta = Math.abs(actual - expected);
1259         if (delta > mLargestColorDelta) {
1260             mLargestColorDelta = delta;
1261         }
1262         return (delta <= MAX_DELTA);
1263     }
1264 
1265     /**
1266      * Generates the presentation time for frame N, in microseconds.
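     * <p>
     * At 15 frames per second, say, this yields 132, 66798, 133465, ... microseconds
     * (the division truncates).  The non-zero start makes it easier to notice code
     * that assumes timestamps begin at zero.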
1267      */
1268     private static long computePresentationTime(int frameIndex) {
1269         return 132 + frameIndex * 1000000L / FRAME_RATE;  // long math avoids overflow for large frame counts
1270     }
1271 
1272     /**
1273      * Tests streaming of video through the encoder and decoder.  Data is encoded from
1274      * a series of byte[] buffers and decoded into ByteBuffers.  The output is checked for
1275      * validity.
1276      */
1277     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
1278             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
1279             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
1280             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
1281             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar"})
1282     @Test
1283     public void testEncodeDecodeVideoFromBufferToBuffer() throws Exception {
1284         encodeDecodeVideoFromBuffer(false);
1285     }
1286 
1287     /**
1288      * Tests streaming of video through the encoder and decoder.  Data is encoded from
1289      * a series of byte[] buffers and decoded into Surfaces.  The output is checked for
1290      * validity.
1291      * <p>
1292      * Because of the way SurfaceTexture.OnFrameAvailableListener works, we need to run this
1293      * test on a thread that doesn't have a Looper configured.  If we don't, the test will
1294      * pass, but we won't actually test the output because we'll never receive the "frame
1295      * available" notifications".  The CTS test framework seems to be configuring a Looper on
1296      * available" notifications.  The CTS test framework seems to configure a Looper on
1297      * the test.
1298      */
1299     @ApiTest(apis = {"android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420Planar",
1300             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedPlanar",
1301             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
1302             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatYUV420PackedSemiPlanar",
1303             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_TI_FormatYUV420PackedSemiPlanar",
1304             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
1305             "android.opengl.GLES20#glReadPixels",
1306             "android.media.MediaFormat#KEY_COLOR_RANGE",
1307             "android.media.MediaFormat#KEY_COLOR_STANDARD",
1308             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
1309     @Test
1310     public void testEncodeDecodeVideoFromBufferToSurface() throws Throwable {
1311         BufferToSurfaceWrapper.runTest(this);
1312     }
1313 
1314     /**
1315      * Tests streaming of AVC through the encoder and decoder.  Data is provided through
1316      * a Surface and decoded onto a Surface.  The output is checked for validity.
1317      */
1318     @ApiTest(apis = {"android.media.MediaCodec#createInputSurface",
1319             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
1320             "android.opengl.GLES20#glReadPixels",
1321             "android.media.MediaFormat#KEY_COLOR_RANGE",
1322             "android.media.MediaFormat#KEY_COLOR_STANDARD",
1323             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
1324     @Test
1325     public void testEncodeDecodeVideoFromSurfaceToSurface() throws Throwable {
1326         SurfaceToSurfaceWrapper.runTest(this, false, false);
1327     }
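
    /**
     * NDK variant of {@link #testEncodeDecodeVideoFromSurfaceToSurface}: the same
     * surface-to-surface path, driven through AMediaCodec.
     */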
1328     @ApiTest(apis = {"AMediaCodec_createInputSurface",
1329             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
1330             "android.opengl.GLES20#glReadPixels",
1331             "android.media.MediaFormat#KEY_COLOR_RANGE",
1332             "android.media.MediaFormat#KEY_COLOR_STANDARD",
1333             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
1334     @Test
1335     public void testEncodeDecodeVideoFromSurfaceToSurfaceNdk() throws Throwable {
1336         SurfaceToSurfaceWrapper.runTest(this, false, USE_NDK);
1337     }
1338 
1339     /**
1340      * Tests streaming of video through the encoder and decoder.  Data is provided through
1341      * a PersistentSurface and decoded onto a Surface.  The output is checked for validity.
1342      */
1343     @ApiTest(apis = {"android.media.MediaCodec#createPersistentInputSurface",
1344             "android.media.MediaCodec#setInputSurface",
1345             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
1346             "android.opengl.GLES20#glReadPixels",
1347             "android.media.MediaFormat#KEY_COLOR_RANGE",
1348             "android.media.MediaFormat#KEY_COLOR_STANDARD",
1349             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
1350     @Test
1351     public void testEncodeDecodeVideoFromSurfaceToPersistentSurface() throws Throwable {
1352         SurfaceToSurfaceWrapper.runTest(this, true, false);
1353     }
1354 
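    /**
     * NDK variant of {@link #testEncodeDecodeVideoFromSurfaceToPersistentSurface},
     * driven through AMediaCodec.
     */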
1355     @ApiTest(apis = {"AMediaCodec_createPersistentInputSurface",
1356             "AMediaCodec_setInputSurface",
1357             "android.media.MediaCodecInfo.CodecCapabilities#COLOR_FormatSurface",
1358             "android.opengl.GLES20#glReadPixels",
1359             "android.media.MediaFormat#KEY_COLOR_RANGE",
1360             "android.media.MediaFormat#KEY_COLOR_STANDARD",
1361             "android.media.MediaFormat#KEY_COLOR_TRANSFER"})
1362     @Test
1363     public void testEncodeDecodeVideoFromSurfaceToPersistentSurfaceNdk() throws Throwable {
1364         SurfaceToSurfaceWrapper.runTest(this, true, USE_NDK);
1365     }
1366 }
1367