xref: /aosp_15_r20/cts/tests/video/src/android/video/cts/VideoEncoderDecoderTest.java (revision b7c941bb3fa97aba169d73cee0bed2de8ac964bf)
1 /*
2  * Copyright (C) 2013 The Android Open Source Project
3  *
4  * Licensed under the Apache License, Version 2.0 (the "License");
5  * you may not use this file except in compliance with the License.
6  * You may obtain a copy of the License at
7  *
8  *      http://www.apache.org/licenses/LICENSE-2.0
9  *
10  * Unless required by applicable law or agreed to in writing, software
11  * distributed under the License is distributed on an "AS IS" BASIS,
12  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13  * See the License for the specific language governing permissions and
14  * limitations under the License.
15  */
16 
17 package android.video.cts;
18 
19 import static org.junit.Assert.assertFalse;
20 import static org.junit.Assert.assertNotNull;
21 import static org.junit.Assert.assertTrue;
22 import static org.junit.Assert.fail;
23 
24 import android.graphics.ImageFormat;
25 import android.graphics.Point;
26 import android.media.Image;
27 import android.media.Image.Plane;
28 import android.media.MediaCodec;
29 import android.media.MediaCodec.BufferInfo;
30 import android.media.MediaCodecInfo;
31 import android.media.MediaCodecInfo.CodecCapabilities;
32 import android.media.MediaFormat;
33 import android.media.cts.CodecImage;
34 import android.media.cts.CodecUtils;
35 import android.media.cts.TestArgs;
36 import android.media.cts.TestUtils;
37 import android.media.cts.YUVImage;
38 import android.os.Build;
39 import android.util.Log;
40 import android.util.Pair;
41 
42 import androidx.test.platform.app.InstrumentationRegistry;
43 
44 import com.android.compatibility.common.util.ApiTest;
45 import com.android.compatibility.common.util.DeviceReportLog;
46 import com.android.compatibility.common.util.MediaPerfUtils;
47 import com.android.compatibility.common.util.MediaUtils;
48 import com.android.compatibility.common.util.ResultType;
49 import com.android.compatibility.common.util.ResultUnit;
50 import com.android.compatibility.common.util.Stat;
51 
52 import org.junit.After;
53 import org.junit.Before;
54 import org.junit.Test;
55 import org.junit.runner.RunWith;
56 import org.junit.runners.Parameterized;
57 
58 import java.io.IOException;
59 import java.nio.ByteBuffer;
60 import java.util.ArrayList;
61 import java.util.Arrays;
62 import java.util.Collection;
63 import java.util.LinkedList;
64 import java.util.List;
65 import java.util.Random;
66 
67 /**
68  * This tries to test video encoder / decoder performance by running encoding / decoding
69  * without displaying the raw data. To make things simpler, encoder is used to encode synthetic
70  * data and decoder is used to decode the encoded video. This approach does not work where
71  * there is only decoder. Performance index is total time taken for encoding and decoding
72  * the whole frames.
73  * To prevent sacrificing quality for faster encoding / decoding, randomly selected pixels are
74  * compared with the original image. As the pixel comparison can slow down the decoding process,
75  * only some randomly selected pixels are compared. As there can be only one performance index,
76  * error above certain threshold in pixel value will be treated as an error.
77  */
78 @RunWith(Parameterized.class)
79 public class VideoEncoderDecoderTest {
    private static final String TAG = "VideoEncoderDecoderTest";
    private static final String REPORT_LOG_NAME = "CtsVideoTestCases";
    // this wait time affects fps as too big value will work as a blocker if device fps
    // is not very high.
    private static final long VIDEO_CODEC_WAIT_TIME_US = 1000;
    private static final boolean VERBOSE = false;
    private static final int MAX_FPS = 30; // measure performance at 30fps, this is relevant for
                                           // the meaning of bitrate

    // Shorthand for the MIME types exercised by this test.
    private static final String AVC = MediaFormat.MIMETYPE_VIDEO_AVC;
    private static final String H263 = MediaFormat.MIMETYPE_VIDEO_H263;
    private static final String HEVC = MediaFormat.MIMETYPE_VIDEO_HEVC;
    private static final String MPEG2 = MediaFormat.MIMETYPE_VIDEO_MPEG2;
    private static final String MPEG4 = MediaFormat.MIMETYPE_VIDEO_MPEG4;
    private static final String VP8 = MediaFormat.MIMETYPE_VIDEO_VP8;
    private static final String VP9 = MediaFormat.MIMETYPE_VIDEO_VP9;
    private static final String AV1 = MediaFormat.MIMETYPE_VIDEO_AV1;

    // test results:

    // Index of the run currently being executed; used to select the result slot.
    private int mCurrentTestRound = 0;
    // Per-run arrays of inter-frame time deltas (us) on the encoder side; outer
    // index is the run, inner index is the frame gap.
    private double[][] mEncoderFrameTimeUsDiff;
    private double[] mEncoderFpsResults;

    // Decoder-side counterparts of the encoder result arrays above.
    private double[][] mDecoderFrameTimeUsDiff;
    private double[] mDecoderFpsResults;
    // Combined encode+decode throughput per run (only filled when frame counts match).
    private double[] mTotalFpsResults;
    private double[] mDecoderRmsErrorResults;

    // i frame interval for encoder
    private static final int KEY_I_FRAME_INTERVAL = 5;
    private static final int MAX_TEST_TIMEOUT_MS = 300000;   // 5 minutes

    // Nominal video range for luma samples (BT.601 limited range).
    private static final int Y_CLAMP_MIN = 16;
    private static final int Y_CLAMP_MAX = 235;
    // Extra padding added to each synthetic YUV plane dimension.
    private static final int YUV_PLANE_ADDITIONAL_LENGTH = 200;
    // Synthetic source planes; the "Direct" variants are direct ByteBuffers.
    private ByteBuffer mYBuffer, mYDirectBuffer;
    private ByteBuffer mUVBuffer, mUVDirectBuffer;
    private int mSrcColorFormat;    // color format fed to the encoder
    private int mDstColorFormat;    // color format requested from the decoder
    private int mBufferWidth;       // allocated plane width (video width + padding)
    private int mBufferHeight;      // allocated plane height (video height + padding)
    private int mVideoWidth;
    private int mVideoHeight;
    private int mVideoStride;       // encoder input row stride (KEY_STRIDE or width)
    private int mVideoVStride;      // encoder input slice height (KEY_SLICE_HEIGHT or height)
    private int mFrameRate;

    // Formats captured at each pipeline stage, for performance-report headers.
    private MediaFormat mEncConfigFormat;
    private MediaFormat mEncInputFormat;
    private MediaFormat mEncOutputFormat;
    private MediaFormat mDecOutputFormat;

    // Encoded bitstream accumulated during the encode pass and replayed to the decoder.
    private LinkedList<Pair<ByteBuffer, BufferInfo>> mEncodedOutputBuffer;
    // check this many pixels per each decoded frame
    // checking too many points decreases decoder frame rates a lot.
    private static final int PIXEL_CHECK_PER_FRAME = 1000;
    // RMS error in pixel values above this will be treated as error.
    private static final double PIXEL_RMS_ERROR_MARGIN = 20.0;
    // offset legitimate timestamps away from 0, so that we'll never confuse them
    // with a missing or otherwise erroneous timestamp.
    private static final int TIMESTAMP_OFFSET = 132;
    // RMS margin in effect for the current quality run (defaults to PIXEL_RMS_ERROR_MARGIN).
    private double mRmsErrorMargin;
    private Random mRandom;

    // True when the software codec mainline module has been updated on this device.
    private boolean mUpdatedSwCodec = false;

    // Parameterized-test inputs (one tuple per test instance).
    private String mMediaType;
    private int mWidth;
    private int mHeight;
    private String mEncoderName;
    private int mMaxBFrames;
152 
    /**
     * Tunable knobs for a single test invocation. Defaults describe a quality
     * run; {@link #initPerfTest} switches them to performance-run values.
     */
    private class TestConfig {
        public boolean mTestPixels = true;       // verify decoded pixels against the source
        public boolean mReportFrameTime = false; // emit per-frame timing in the report log
        public int mTotalFrames = 300;           // upper bound on frames to encode
        public int mMinNumFrames = 300;          // minimum frames before early EOS is allowed
        public int mMaxTimeMs = 120000;  // 2 minutes
        public int mMinTimeMs = 10000;   // 10 seconds
        public int mNumberOfRepeat = 10; // encode/decode repetitions per test

        /** Reconfigure for a performance run: more frames, fewer repeats, no pixel checks. */
        public void initPerfTest() {
            mTestPixels = false;
            mTotalFrames = 30000;
            mMinNumFrames = 3000;
            mNumberOfRepeat = 2;
        }
    }
169 
170     private TestConfig mTestConfig;
171 
isPreferredAbi()172     private static boolean isPreferredAbi() {
173         boolean prefers64Bit = false;
174         if (Build.SUPPORTED_64_BIT_ABIS.length > 0 &&
175                 Build.SUPPORTED_ABIS.length > 0 &&
176                 Build.SUPPORTED_ABIS[0].equals(Build.SUPPORTED_64_BIT_ABIS[0])) {
177             prefers64Bit = true;
178         }
179         return android.os.Process.is64Bit() ? prefers64Bit : !prefers64Bit;
180     }
181 
182     @Before
setUp()183     public void setUp() throws Exception {
184         mEncodedOutputBuffer = new LinkedList<Pair<ByteBuffer, BufferInfo>>();
185         mUpdatedSwCodec =
186                 TestUtils.isUpdatedMainlineModule("com.google.android.media.swcodec");
187         // Use time as a seed, hoping to prevent checking pixels in the same pattern
188         long now = System.currentTimeMillis();
189         mRandom = new Random(now);
190         mTestConfig = new TestConfig();
191     }
192 
193     @After
tearDown()194     public void tearDown() throws Exception {
195         mEncodedOutputBuffer.clear();
196         mEncodedOutputBuffer = null;
197         mYBuffer = null;
198         mUVBuffer = null;
199         mYDirectBuffer = null;
200         mUVDirectBuffer = null;
201         mRandom = null;
202         mTestConfig = null;
203     }
204 
    /**
     * Runs a performance test (no decoding pass, no pixel verification).
     *
     * @param mimeType video MIME type to encode
     * @param w frame width
     * @param h frame height
     * @param encoder encoder component name
     * @param maxBFrames value for {@code MediaFormat.KEY_MAX_B_FRAMES}
     */
    private void perf(String mimeType, int w, int h, String encoder, int maxBFrames)
            throws Exception {
        doTest(mimeType, w, h, true /* isPerf */, encoder, maxBFrames);
    }
210 
    /** Runs a quality test with the default RMS error margin (PIXEL_RMS_ERROR_MARGIN). */
    private void qual(String mimeType, int w, int h, String encoder, int maxBFrames)
            throws Exception {
        qual(mimeType, w, h, encoder, maxBFrames, PIXEL_RMS_ERROR_MARGIN);
    }
216 
    /**
     * Runs a quality test with a caller-supplied RMS error margin.
     *
     * @param margin maximum acceptable RMS pixel error for the decoded output
     */
    private void qual(String mimeType, int w, int h, String encoder, int maxBFrames, double margin)
            throws Exception {
        mRmsErrorMargin = margin;
        doTest(mimeType, w, h, false /* isPerf */, encoder, maxBFrames);
    }
223 
prepareParamsList(List<Object[]> testParams, String mediaType, int[] widths, int[] heights)224     static void prepareParamsList(List<Object[]> testParams, String mediaType, int[] widths,
225             int[] heights) {
226         String[] encoderNames = MediaUtils.getEncoderNamesForMime(mediaType);
227         int[] maxBFrames = {0, 2};
228         for (int i = 0; i < widths.length; i++) {
229             MediaFormat format =
230                     MediaFormat.createVideoFormat(mediaType, widths[i], heights[i]);
231             for (String encoder : encoderNames) {
232                 if (TestArgs.shouldSkipCodec(encoder)) {
233                     continue;
234                 }
235                 if (MediaUtils.supports(encoder, format)) {
236                     for (int maxBFrame : maxBFrames) {
237                         if (!mediaType.equals(MediaFormat.MIMETYPE_VIDEO_AVC)
238                                 && !mediaType.equals(MediaFormat.MIMETYPE_VIDEO_HEVC)
239                                 && maxBFrame != 0) {
240                             continue;
241                         }
242                         testParams.add(
243                                 new Object[]{mediaType, widths[i], heights[i], encoder, maxBFrame});
244                     }
245                 }
246             }
247         }
248     }
249 
250     @Parameterized.Parameters(name = "{0}_{3}_{1}x{2}_{4}")
input()251     public static Collection<Object[]> input() throws IOException {
252         final List<Object[]> testParams = new ArrayList<>();
253         final String[] mediaTypes = {AVC, HEVC, MPEG2, MPEG4, VP8, VP9, H263, AV1};
254         for (String mediaType : mediaTypes) {
255             if (mediaType.equals(AVC)) {
256                 int[] widths = {320, 720, 1280, 1920};
257                 int[] heights = {240, 480, 720, 1080};
258                 prepareParamsList(testParams, mediaType, widths, heights);
259             } else if (mediaType.equals(H263)) {
260                 int[] widths = {176, 352, 704, 1408};
261                 int[] heights = {144, 288, 576, 1152};
262                 prepareParamsList(testParams, mediaType, widths, heights);
263             } else if (mediaType.equals(HEVC)) {
264                 int[] widths = {320, 720, 1280, 1920, 3840};
265                 int[] heights = {240, 480, 720, 1080, 2160};
266                 prepareParamsList(testParams, mediaType, widths, heights);
267             } else if (mediaType.equals(MPEG2)) {
268                 int[] widths = {176, 352, 640, 1280, 1920};
269                 int[] heights = {144, 288, 480, 720, 1080};
270                 prepareParamsList(testParams, mediaType, widths, heights);
271             } else if (mediaType.equals(MPEG4)) {
272                 int[] widths = {176, 352, 640, 1280};
273                 int[] heights = {144, 288, 480, 720};
274                 prepareParamsList(testParams, mediaType, widths, heights);
275             } else if (mediaType.equals(VP8)) {
276                 int[] widths = {320, 640, 1280, 1920};
277                 int[] heights = {180, 360, 720, 1080};
278                 prepareParamsList(testParams, mediaType, widths, heights);
279             } else if (mediaType.equals(VP9)) {
280                 int[] widths = {320, 640, 1280, 1920, 3840};
281                 int[] heights = {180, 360, 720, 1080, 2160};
282                 prepareParamsList(testParams, mediaType, widths, heights);
283             } else if (mediaType.equals(AV1)) {
284                 int[] widths = {320, 720, 1280, 1920};
285                 int[] heights = {240, 480, 720, 1080};
286                 prepareParamsList(testParams, mediaType, widths, heights);
287             }
288         }
289         return testParams;
290     }
291 
VideoEncoderDecoderTest(String mediaType, int width, int height, String encoderName, int maxBFrames)292     public VideoEncoderDecoderTest(String mediaType, int width, int height,
293             String encoderName, int maxBFrames) {
294         this.mMediaType = mediaType;
295         this.mWidth = width;
296         this.mHeight = height;
297         this.mEncoderName = encoderName;
298         this.mMaxBFrames = maxBFrames;
299     }
300 
301     @ApiTest(apis = {"VideoCapabilities#getSupportedWidths",
302             "VideoCapabilities#getSupportedHeightsFor",
303             "VideoCapabilities#getSupportedFrameRatesFor",
304             "VideoCapabilities#getBitrateRange",
305             "VideoCapabilities#getAchievableFrameRatesFor",
306             "CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
307             "CodecCapabilities#COLOR_FormatYUV420Planar",
308             "CodecCapabilities#COLOR_FormatYUV420Flexible",
309             "android.media.MediaFormat#KEY_MAX_B_FRAMES"})
310     @Test
testQual()311     public void testQual() throws Exception {
312         if (mMediaType == H263 && (mWidth == 704 || mWidth == 1408)) {
313             qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames, 25);
314         } else {
315             qual(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames);
316         }
317     }
318 
    @ApiTest(apis = {"VideoCapabilities#getSupportedWidths",
            "VideoCapabilities#getSupportedHeightsFor",
            "VideoCapabilities#getSupportedFrameRatesFor",
            "VideoCapabilities#getBitrateRange",
            "VideoCapabilities#getAchievableFrameRatesFor",
            "CodecCapabilities#COLOR_FormatYUV420SemiPlanar",
            "CodecCapabilities#COLOR_FormatYUV420Planar",
            "CodecCapabilities#COLOR_FormatYUV420Flexible",
            "android.media.MediaFormat#KEY_MAX_B_FRAMES"})
    @Test
    // Measures achievable encoder frame rates; no decoding or pixel checks.
    public void testPerf() throws Exception {
        perf(mMediaType, mWidth, mHeight, mEncoderName, mMaxBFrames);
    }
332 
isSrcSemiPlanar()333     private boolean isSrcSemiPlanar() {
334         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
335     }
336 
isSrcFlexYUV()337     private boolean isSrcFlexYUV() {
338         return mSrcColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
339     }
340 
isDstSemiPlanar()341     private boolean isDstSemiPlanar() {
342         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
343     }
344 
isDstFlexYUV()345     private boolean isDstFlexYUV() {
346         return mDstColorFormat == CodecCapabilities.COLOR_FormatYUV420Flexible;
347     }
348 
getColorFormat(CodecInfo info)349     private static int getColorFormat(CodecInfo info) {
350         if (info.mSupportSemiPlanar) {
351             return CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
352         } else if (info.mSupportPlanar) {
353             return CodecCapabilities.COLOR_FormatYUV420Planar;
354         } else {
355             // FlexYUV must be supported
356             return CodecCapabilities.COLOR_FormatYUV420Flexible;
357         }
358     }
359 
360     private static class RunResult {
361         public final int mNumFrames;
362         public final double mDurationMs;
363         public final double mRmsError;
364 
RunResult()365         RunResult() {
366             mNumFrames = 0;
367             mDurationMs = Double.NaN;
368             mRmsError = Double.NaN;
369         }
370 
RunResult(int numFrames, double durationMs)371         RunResult(int numFrames, double durationMs) {
372             mNumFrames = numFrames;
373             mDurationMs = durationMs;
374             mRmsError = Double.NaN;
375         }
376 
RunResult(int numFrames, double durationMs, double rmsError)377         RunResult(int numFrames, double durationMs, double rmsError) {
378             mNumFrames = numFrames;
379             mDurationMs = durationMs;
380             mRmsError = rmsError;
381         }
382     }
383 
doTest(String mimeType, int w, int h, boolean isPerf, String encoderName, int maxBFrames)384     private void doTest(String mimeType, int w, int h, boolean isPerf, String encoderName,
385             int maxBFrames) throws Exception {
386         if (TestArgs.shouldSkipMediaType(mimeType)) {
387             return;
388         }
389         MediaFormat format = MediaFormat.createVideoFormat(mimeType, w, h);
390 
391         if (isPerf) {
392             mTestConfig.initPerfTest();
393         }
394 
395         if (TestArgs.shouldSkipCodec(encoderName)) {
396             return;
397         }
398         CodecInfo infoEnc = CodecInfo.getSupportedFormatInfo(encoderName, mimeType, w, h, MAX_FPS);
399         assertNotNull(infoEnc);
400 
401         // Skip decoding pass for performance tests as bitstream complexity is not representative
402         String[] decoderNames = null;  // no decoding pass required by default
403         int codingPasses = 1;  // used for time limit. 1 for encoding pass
404         int numRuns = mTestConfig.mNumberOfRepeat;  // used for result array sizing
405         if (!isPerf) {
406             // consider all decoders for quality tests
407             decoderNames = MediaUtils.getDecoderNames(format);
408             if (decoderNames.length == 0) {
409                 MediaUtils.skipTest("No decoders for " + format);
410                 return;
411             }
412             numRuns *= decoderNames.length; // combine each decoder with the encoder
413             codingPasses += decoderNames.length;
414         }
415 
416         // be a bit conservative
417         mTestConfig.mMaxTimeMs = Math.min(
418                 mTestConfig.mMaxTimeMs, MAX_TEST_TIMEOUT_MS / 5 * 4 / codingPasses
419                         / mTestConfig.mNumberOfRepeat);
420         // reduce test-run on non-real devices
421         if (MediaUtils.onFrankenDevice()) {
422             mTestConfig.mMaxTimeMs /= 10;
423         }
424         Log.i(TAG, "current ABI is " + (isPreferredAbi() ? "" : "not ") + "a preferred one");
425 
426         mVideoWidth = w;
427         mVideoHeight = h;
428         mSrcColorFormat = getColorFormat(infoEnc);
429         Log.i(TAG, "Testing video resolution " + w + "x" + h + ": enc format " + mSrcColorFormat);
430 
431         initYUVPlane(w + YUV_PLANE_ADDITIONAL_LENGTH, h + YUV_PLANE_ADDITIONAL_LENGTH);
432 
433         // Adjust total number of frames to prevent OOM.
434         Runtime rt = Runtime.getRuntime();
435         long usedMemory = rt.totalMemory() - rt.freeMemory();
436         mTestConfig.mTotalFrames = Math.min(mTestConfig.mTotalFrames,
437                 (int) (rt.maxMemory() - usedMemory) / 4 * 3 /
438                 (infoEnc.mBitRate / 8 / infoEnc.mFps + 1));
439         Log.i(TAG, "Total testing frames " + mTestConfig.mTotalFrames);
440 
441         mEncoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
442         mEncoderFpsResults = new double[numRuns];
443 
444         if (decoderNames != null) {
445             mDecoderFrameTimeUsDiff = new double[numRuns][mTestConfig.mTotalFrames - 1];
446             mDecoderFpsResults = new double[numRuns];
447             mTotalFpsResults = new double[numRuns];
448             mDecoderRmsErrorResults = new double[numRuns];
449         }
450 
451         boolean success = true;
452         int runIx = 0;
453         for (int i = 0; i < mTestConfig.mNumberOfRepeat && success; i++) {
454             mCurrentTestRound = runIx;
455             format = new MediaFormat();
456             format.setString(MediaFormat.KEY_MIME, mimeType);
457             format.setInteger(MediaFormat.KEY_BIT_RATE, infoEnc.mBitRate);
458             format.setInteger(MediaFormat.KEY_BITRATE_MODE,
459                     MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
460             format.setInteger(MediaFormat.KEY_WIDTH, w);
461             format.setInteger(MediaFormat.KEY_HEIGHT, h);
462             format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mSrcColorFormat);
463             format.setInteger(MediaFormat.KEY_FRAME_RATE, infoEnc.mFps);
464             mFrameRate = infoEnc.mFps;
465             format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, KEY_I_FRAME_INTERVAL);
466             format.setInteger(MediaFormat.KEY_MAX_B_FRAMES, maxBFrames);
467 
468             RunResult encodingResult =
469                 runEncoder(encoderName, format, mTestConfig.mTotalFrames, i);
470             double encodingTime = encodingResult.mDurationMs;
471             int framesEncoded = encodingResult.mNumFrames;
472 
473             if (decoderNames != null && decoderNames.length > 0) {
474                 for (String decoderName : decoderNames) {
475                     if (TestArgs.shouldSkipCodec(decoderName)) {
476                         continue;
477                     }
478                     CodecInfo infoDec =
479                         CodecInfo.getSupportedFormatInfo(decoderName, mimeType, w, h, MAX_FPS);
480                     assertNotNull(infoDec);
481                     mDstColorFormat = getColorFormat(infoDec);
482 
483                     // re-initialize format for decoder
484                     format = new MediaFormat();
485                     format.setString(MediaFormat.KEY_MIME, mimeType);
486                     format.setInteger(MediaFormat.KEY_WIDTH, w);
487                     format.setInteger(MediaFormat.KEY_HEIGHT, h);
488                     format.setInteger(MediaFormat.KEY_COLOR_FORMAT, mDstColorFormat);
489                     RunResult decoderResult = runDecoder(decoderName, format, i);
490                     if (decoderResult == null) {
491                         success = false;
492                     } else {
493                         double decodingTime = decoderResult.mDurationMs;
494                         mDecoderRmsErrorResults[runIx] = decoderResult.mRmsError;
495                         mEncoderFpsResults[runIx] = framesEncoded / encodingTime;
496                         int framesDecoded = decoderResult.mNumFrames;
497                         mDecoderFpsResults[runIx] = framesDecoded / decodingTime;
498                         if (framesDecoded == framesEncoded) {
499                             mTotalFpsResults[runIx] =
500                                 framesEncoded / (encodingTime + decodingTime);
501                         }
502                     }
503                     ++runIx;
504                 }
505             } else {
506                 mEncoderFpsResults[runIx] = mTestConfig.mTotalFrames / encodingTime;
507                 ++runIx;
508             }
509 
510             // clear things for re-start
511             mEncodedOutputBuffer.clear();
512             // it will be good to clean everything to make every run the same.
513             System.gc();
514         }
515 
516         // log results before verification
517         double[] measuredFps = new double[numRuns];
518         if (isPerf) {
519             for (int i = 0; i < numRuns; i++) {
520                 measuredFps[i] = logPerformanceResults(encoderName, i);
521             }
522         }
523         if (mTestConfig.mTestPixels && decoderNames != null) {
524             logQualityResults(mimeType, encoderName, decoderNames);
525             for (int i = 0; i < numRuns; i++) {
526                 // make sure that rms error is not too big for all runs
527                 if (mDecoderRmsErrorResults[i] >= mRmsErrorMargin) {
528                     fail("rms error is bigger than the limit "
529                             + Arrays.toString(mDecoderRmsErrorResults) + " vs " + mRmsErrorMargin);
530                 }
531             }
532         }
533 
534         if (isPerf) {
535             // allow improvements in mainline-updated google-supplied software codecs.
536             boolean fasterIsOk =  mUpdatedSwCodec & encoderName.startsWith("c2.android.");
537             String error = MediaPerfUtils.verifyAchievableFrameRates(
538                     encoderName, mimeType, w, h, fasterIsOk, maxBFrames > 0, measuredFps);
539             // Performance numbers only make sense on real devices, so skip on non-real devices
540             //
541             // Also ignore verification on non-preferred ABIs due to the possibility of
542             // this being emulated. On some CPU-s 32-bit mode is emulated using big cores
543             // that results in the SW codecs also running much faster (perhaps they are
544             // scheduled for the big cores as well)
545             // TODO: still verify lower bound.
546             if (error != null) {
547                 if (MediaUtils.onFrankenDevice() || Build.IS_EMULATOR
548                         || (infoEnc.mIsSoftware && !isPreferredAbi())) {
549                     // ensure there is data, but don't insist that it is correct
550                     assertFalse(error, error.startsWith("Failed to get "));
551                 } else {
552                     fail("encountered error " + error);
553                 }
554             }
555         }
556         assertTrue(success);
557     }
558 
logQualityResults(String mimeType, String encoderName, String[] decoderNames)559     private void logQualityResults(String mimeType, String encoderName, String[] decoderNames) {
560         String streamName = "video_encoder_decoder_quality";
561         DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
562         log.addValue("encoder_name", encoderName, ResultType.NEUTRAL, ResultUnit.NONE);
563         log.addValues("decoder_names", Arrays.asList(decoderNames), ResultType.NEUTRAL, ResultUnit.NONE);
564         log.addValue("mime_type", mimeType, ResultType.NEUTRAL, ResultUnit.NONE);
565         log.addValue("width", mVideoWidth, ResultType.NEUTRAL, ResultUnit.NONE);
566         log.addValue("height", mVideoHeight, ResultType.NEUTRAL, ResultUnit.NONE);
567         log.addValues("encoder_fps", mEncoderFpsResults, ResultType.HIGHER_BETTER,
568                 ResultUnit.FPS);
569         log.addValues("rms_error", mDecoderRmsErrorResults, ResultType.LOWER_BETTER,
570                 ResultUnit.NONE);
571         log.addValues("decoder_fps", mDecoderFpsResults, ResultType.HIGHER_BETTER,
572                 ResultUnit.FPS);
573         log.addValues("encoder_decoder_fps", mTotalFpsResults, ResultType.HIGHER_BETTER,
574                 ResultUnit.FPS);
575         log.addValue("encoder_average_fps", Stat.getAverage(mEncoderFpsResults),
576                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
577         log.addValue("decoder_average_fps", Stat.getAverage(mDecoderFpsResults),
578                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
579         log.setSummary("encoder_decoder_average_fps", Stat.getAverage(mTotalFpsResults),
580                 ResultType.HIGHER_BETTER, ResultUnit.FPS);
581         log.submit(InstrumentationRegistry.getInstrumentation());
582     }
583 
logPerformanceResults(String encoderName, int round)584     private double logPerformanceResults(String encoderName, int round) {
585         String streamName = "video_encoder_performance";
586         DeviceReportLog log = new DeviceReportLog(REPORT_LOG_NAME, streamName);
587         String message = MediaPerfUtils.addPerformanceHeadersToLog(
588                 log, "encoder stats:", round, encoderName,
589                 mEncConfigFormat, mEncInputFormat, mEncOutputFormat);
590         double[] frameTimeUsDiff = mEncoderFrameTimeUsDiff[round];
591         double fps = MediaPerfUtils.addPerformanceStatsToLog(
592                 log, new MediaUtils.Stats(frameTimeUsDiff), message);
593 
594         if (mTestConfig.mReportFrameTime) {
595             double[] msDiff = new double[frameTimeUsDiff.length];
596             double nowUs = 0, lastMs = 0;
597             for (int i = 0; i < frameTimeUsDiff.length; ++i) {
598                 nowUs += frameTimeUsDiff[i];
599                 double nowMs = Math.round(nowUs) / 1000.;
600                 msDiff[i] = Math.round((nowMs - lastMs) * 1000) / 1000.;
601                 lastMs = nowMs;
602             }
603             log.addValues("encoder_raw_diff", msDiff, ResultType.NEUTRAL, ResultUnit.MS);
604         }
605 
606         log.submit(InstrumentationRegistry.getInstrumentation());
607         return fps;
608     }
609 
610     /**
611      * run encoder benchmarking
612      * @param encoderName encoder name
613      * @param format format of media to encode
614      * @param totalFrames total number of frames to encode
615      * @return time taken in ms to encode the frames. This does not include initialization time.
616      */
runEncoder( String encoderName, MediaFormat format, int totalFrames, int runId)617     private RunResult runEncoder(
618             String encoderName, MediaFormat format, int totalFrames, int runId) {
619         MediaCodec codec = null;
620         try {
621             codec = MediaCodec.createByCodecName(encoderName);
622             mEncConfigFormat = format;
623             codec.configure(
624                     format,
625                     null /* surface */,
626                     null /* crypto */,
627                     MediaCodec.CONFIGURE_FLAG_ENCODE);
628         } catch (IllegalStateException e) {
629             Log.e(TAG, "codec '" + encoderName + "' failed configuration.");
630             codec.release();
631             assertTrue("codec '" + encoderName + "' failed configuration.", false);
632         } catch (IOException | NullPointerException e) {
633             Log.i(TAG, "could not find codec for " + format);
634             return new RunResult();
635         }
636         codec.start();
637         mEncInputFormat = codec.getInputFormat();
638         ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
639         MediaFormat inputFormat = codec.getInputFormat();
640         mVideoStride = inputFormat.containsKey(MediaFormat.KEY_STRIDE)
641                 ? inputFormat.getInteger(MediaFormat.KEY_STRIDE)
642                 : inputFormat.getInteger(MediaFormat.KEY_WIDTH);
643         mVideoVStride = inputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
644                 ? inputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
645                 : inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
646 
647         int numBytesSubmitted = 0;
648         int numBytesDequeued = 0;
649         int inFramesCount = 0;
650         int outFramesCount = 0;
651         long lastOutputTimeUs = 0;
652         long start = System.currentTimeMillis();
653         while (true) {
654             int index;
655 
656             if (inFramesCount < totalFrames) {
657                 index = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
658                 if (index != MediaCodec.INFO_TRY_AGAIN_LATER) {
659                     int size;
660                     long elapsedMs = System.currentTimeMillis() - start;
661                     boolean eos = (inFramesCount == totalFrames - 1
662                             || elapsedMs > mTestConfig.mMaxTimeMs
663                             || (elapsedMs > mTestConfig.mMinTimeMs
664                                     && inFramesCount > mTestConfig.mMinNumFrames));
665 
666                     // when encoder only supports flexYUV, use Image only; otherwise,
667                     // use ByteBuffer & Image each on half of the frames to test both
668                     if (isSrcFlexYUV() || inFramesCount % 2 == 0) {
669                         Image image = codec.getInputImage(index);
670                         // image should always be available
671                         assertTrue(image != null);
672                         size = queueInputImageEncoder(
673                                 codec, image, index, inFramesCount,
674                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
675                     } else {
676                         ByteBuffer buffer = codec.getInputBuffer(index);
677                         size = queueInputBufferEncoder(
678                                 codec, buffer, index, inFramesCount,
679                                 eos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0, runId);
680                     }
681                     inFramesCount++;
682                     numBytesSubmitted += size;
683                     if (VERBOSE) {
684                         Log.d(TAG, "queued " + size + " bytes of input data, frame " +
685                                 (inFramesCount - 1));
686                     }
687                 }
688             }
689             MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
690             index = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US /* timeoutUs */);
691             if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
692             } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
693                 mEncOutputFormat = codec.getOutputFormat();
694             } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
695                 codecOutputBuffers = codec.getOutputBuffers();
696             } else if (index >= 0) {
697                 long nowUs = (System.nanoTime() + 500) / 1000;
698                 dequeueOutputBufferEncoder(codec, codecOutputBuffers, index, info);
699                 if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
700                     int pos = outFramesCount - 1;
701                     if (pos >= 0 && pos < mEncoderFrameTimeUsDiff[mCurrentTestRound].length) {
702                         mEncoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
703                     }
704                     lastOutputTimeUs = nowUs;
705 
706                     numBytesDequeued += info.size;
707                     ++outFramesCount;
708                 }
709                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
710                     if (VERBOSE) {
711                         Log.d(TAG, "dequeued output EOS.");
712                     }
713                     break;
714                 }
715                 if (VERBOSE) {
716                     Log.d(TAG, "dequeued " + info.size + " bytes of output data.");
717                 }
718             }
719         }
720         long finish = System.currentTimeMillis();
721         int validDataNum = Math.min(mEncodedOutputBuffer.size() - 1,
722                 mEncoderFrameTimeUsDiff[mCurrentTestRound].length);
723         mEncoderFrameTimeUsDiff[mCurrentTestRound] =
724                 Arrays.copyOf(mEncoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
725         if (VERBOSE) {
726             Log.d(TAG, "queued a total of " + numBytesSubmitted + "bytes, "
727                     + "dequeued " + numBytesDequeued + " bytes.");
728         }
729         codec.stop();
730         codec.release();
731         codec = null;
732 
733         mEncOutputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
734                 format.getInteger(MediaFormat.KEY_BIT_RATE));
735         mEncOutputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
736                 format.getInteger(MediaFormat.KEY_FRAME_RATE));
737         if (outFramesCount > 0) {
738             mEncOutputFormat.setInteger(
739                     "actual-bitrate",
740                     (int)(numBytesDequeued * 8. * format.getInteger(MediaFormat.KEY_FRAME_RATE)
741                             / outFramesCount));
742         }
743         return new RunResult(outFramesCount, (finish - start) / 1000.);
744     }
745 
746     /**
747      * Fills input buffer for encoder from YUV buffers.
748      * @return size of enqueued data.
749      */
queueInputBufferEncoder( MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags, int runId)750     private int queueInputBufferEncoder(
751             MediaCodec codec, ByteBuffer buffer, int index, int frameCount, int flags, int runId) {
752         buffer.clear();
753 
754         Point origin = getOrigin(frameCount, runId);
755         // Y color first
756         int srcOffsetY = origin.x + origin.y * mBufferWidth;
757         final byte[] yBuffer = mYBuffer.array();
758         for (int i = 0; i < mVideoHeight; i++) {
759             buffer.position(i * mVideoStride);
760             buffer.put(yBuffer, srcOffsetY, mVideoWidth);
761             srcOffsetY += mBufferWidth;
762         }
763         if (isSrcSemiPlanar()) {
764             int srcOffsetU = origin.y / 2 * mBufferWidth + origin.x / 2 * 2;
765             final byte[] uvBuffer = mUVBuffer.array();
766             for (int i = 0; i < mVideoHeight / 2; i++) {
767                 buffer.position(mVideoVStride * mVideoStride + i * mVideoStride);
768                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth);
769                 srcOffsetU += mBufferWidth;
770             }
771         } else {
772             int srcOffsetU = origin.y / 2 * mBufferWidth / 2 + origin.x / 2;
773             int srcOffsetV = srcOffsetU + mBufferWidth / 2 * mBufferHeight / 2;
774             final byte[] uvBuffer = mUVBuffer.array();
775             for (int i = 0; i < mVideoHeight / 2; i++) { //U only
776                 buffer.position(mVideoVStride * mVideoStride + i * mVideoStride / 2);
777                 buffer.put(uvBuffer, srcOffsetU, mVideoWidth / 2);
778                 srcOffsetU += mBufferWidth / 2;
779             }
780             for (int i = 0; i < mVideoHeight / 2; i++) { //V only
781                 buffer.position(mVideoVStride * mVideoStride * 5 / 4 + i * mVideoStride / 2);
782                 buffer.put(uvBuffer, srcOffsetV, mVideoWidth / 2);
783                 srcOffsetV += mBufferWidth / 2;
784             }
785         }
786         // submit till end of the data
787         int size = buffer.position();
788         long ptsUsec = computePresentationTime(frameCount);
789 
790         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
791         if (VERBOSE && (frameCount == 0)) {
792             printByteArray("Y ", mYBuffer.array(), 0, 20);
793             printByteArray("UV ", mUVBuffer.array(), 0, 20);
794             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
795         }
796         return size;
797     }
798 
799     /**
800      * Fills input image for encoder from YUV buffers.
801      * @return size of enqueued data.
802      */
queueInputImageEncoder( MediaCodec codec, Image image, int index, int frameCount, int flags, int runId)803     private int queueInputImageEncoder(
804             MediaCodec codec, Image image, int index, int frameCount, int flags, int runId) {
805         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
806 
807 
808         Point origin = getOrigin(frameCount, runId);
809 
810         // Y color first
811         CodecImage srcImage = new YUVImage(
812                 origin,
813                 mVideoWidth, mVideoHeight,
814                 mBufferWidth, mBufferHeight,
815                 isSrcSemiPlanar(),
816                 mYDirectBuffer, mUVDirectBuffer);
817 
818         CodecUtils.copyFlexYUVImage(image, srcImage);
819 
820         int size = mVideoHeight * mVideoWidth * 3 / 2;
821         long ptsUsec = computePresentationTime(frameCount);
822 
823         codec.queueInputBuffer(index, 0 /* offset */, size, ptsUsec /* timeUs */, flags);
824         if (VERBOSE && (frameCount == 0)) {
825             printByteArray("Y ", mYBuffer.array(), 0, 20);
826             printByteArray("UV ", mUVBuffer.array(), 0, 20);
827             printByteArray("UV ", mUVBuffer.array(), mBufferWidth * 60, 20);
828         }
829         return size;
830     }
831 
832     /**
833      * Dequeue encoded data from output buffer and store for later usage.
834      */
dequeueOutputBufferEncoder( MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info)835     private void dequeueOutputBufferEncoder(
836             MediaCodec codec, ByteBuffer[] outputBuffers,
837             int index, MediaCodec.BufferInfo info) {
838         ByteBuffer output = outputBuffers[index];
839         int l = info.size;
840         ByteBuffer copied = ByteBuffer.allocate(l);
841         output.get(copied.array(), 0, l);
842         BufferInfo savedInfo = new BufferInfo();
843         savedInfo.set(0, l, info.presentationTimeUs, info.flags);
844         mEncodedOutputBuffer.addLast(Pair.create(copied, savedInfo));
845         codec.releaseOutputBuffer(index, false /* render */);
846     }
847 
848     /**
849      * run decoder benchmarking with encoded stream stored from encoding phase
850      * @param decoderName decoder name
851      * @param format format of media to decode
852      * @return returns length-2 array with 0: time for decoding, 1 : rms error of pixels
853      */
runDecoder(String decoderName, MediaFormat format, int runId)854     private RunResult runDecoder(String decoderName, MediaFormat format, int runId) {
855         MediaCodec codec = null;
856         try {
857             codec = MediaCodec.createByCodecName(decoderName);
858         } catch (IOException | NullPointerException e) {
859             Log.i(TAG, "could not find decoder for " + format);
860             return null;
861         }
862         codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
863         codec.start();
864         ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
865 
866         double totalErrorSquared = 0;
867 
868         MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
869         boolean sawOutputEOS = false;
870         int inputLeft = mEncodedOutputBuffer.size();
871         int inputBufferCount = 0;
872         int outFrameCount = 0;
873         YUVValue expected = new YUVValue();
874         YUVValue decoded = new YUVValue();
875         long lastOutputTimeUs = 0;
876         long start = System.currentTimeMillis();
877         while (!sawOutputEOS) {
878             if (inputLeft > 0) {
879                 int inputBufIndex = codec.dequeueInputBuffer(VIDEO_CODEC_WAIT_TIME_US);
880 
881                 if (inputBufIndex >= 0) {
882                     ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
883                     dstBuf.clear();
884                     ByteBuffer src = mEncodedOutputBuffer.get(inputBufferCount).first;
885                     BufferInfo srcInfo = mEncodedOutputBuffer.get(inputBufferCount).second;
886                     int writeSize = src.capacity();
887                     dstBuf.put(src.array(), 0, writeSize);
888 
889                     int flags = srcInfo.flags;
890                     if ((System.currentTimeMillis() - start) > mTestConfig.mMaxTimeMs) {
891                         flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
892                     }
893 
894                     codec.queueInputBuffer(
895                             inputBufIndex,
896                             0 /* offset */,
897                             writeSize,
898                             srcInfo.presentationTimeUs,
899                             flags);
900                     inputLeft --;
901                     inputBufferCount ++;
902                 }
903             }
904 
905             int res = codec.dequeueOutputBuffer(info, VIDEO_CODEC_WAIT_TIME_US);
906             if (res >= 0) {
907                 int outputBufIndex = res;
908 
909                 // only do YUV compare on EOS frame if the buffer size is none-zero
910                 if (info.size > 0) {
911                     long nowUs = (System.nanoTime() + 500) / 1000;
912                     int pos = outFrameCount - 1;
913                     if (pos >= 0 && pos < mDecoderFrameTimeUsDiff[mCurrentTestRound].length) {
914                         mDecoderFrameTimeUsDiff[mCurrentTestRound][pos] = nowUs - lastOutputTimeUs;
915                     }
916                     lastOutputTimeUs = nowUs;
917 
918                     if (mTestConfig.mTestPixels) {
919                         Point origin = getOrigin(computeFrameIndex(info.presentationTimeUs), runId);
920                         int i;
921 
922                         // if decoder supports planar or semiplanar, check output with
923                         // ByteBuffer & Image each on half of the points
924                         int pixelCheckPerFrame = PIXEL_CHECK_PER_FRAME;
925                         if (!isDstFlexYUV()) {
926                             pixelCheckPerFrame /= 2;
927                             ByteBuffer buf = codec.getOutputBuffer(outputBufIndex);
928                             if (VERBOSE && (outFrameCount == 0)) {
929                                 printByteBuffer("Y ", buf, 0, 20);
930                                 printByteBuffer("UV ", buf, mVideoWidth * mVideoHeight, 20);
931                                 printByteBuffer("UV ", buf,
932                                         mVideoWidth * mVideoHeight + mVideoWidth * 60, 20);
933                             }
934                             for (i = 0; i < pixelCheckPerFrame; i++) {
935                                 int w = mRandom.nextInt(mVideoWidth);
936                                 int h = mRandom.nextInt(mVideoHeight);
937                                 getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
938                                 getPixelValuesFromOutputBuffer(buf, w, h, decoded);
939                                 if (VERBOSE) {
940                                     Log.i(TAG, outFrameCount + "-" + i + "- th round: ByteBuffer:"
941                                             + " expected "
942                                             + expected.mY + "," + expected.mU + "," + expected.mV
943                                             + " decoded "
944                                             + decoded.mY + "," + decoded.mU + "," + decoded.mV);
945                                 }
946                                 totalErrorSquared += expected.calcErrorSquared(decoded);
947                             }
948                         }
949 
950                         Image image = codec.getOutputImage(outputBufIndex);
951                         assertTrue(image != null);
952                         for (i = 0; i < pixelCheckPerFrame; i++) {
953                             int w = mRandom.nextInt(mVideoWidth);
954                             int h = mRandom.nextInt(mVideoHeight);
955                             getPixelValuesFromYUVBuffers(origin.x, origin.y, w, h, expected);
956                             getPixelValuesFromImage(image, w, h, decoded);
957                             if (VERBOSE) {
958                                 Log.i(TAG, outFrameCount + "-" + i + "- th round: FlexYUV:"
959                                         + " expcted "
960                                         + expected.mY + "," + expected.mU + "," + expected.mV
961                                         + " decoded "
962                                         + decoded.mY + "," + decoded.mU + "," + decoded.mV);
963                             }
964                             totalErrorSquared += expected.calcErrorSquared(decoded);
965                         }
966                     }
967                     outFrameCount++;
968                 }
969                 codec.releaseOutputBuffer(outputBufIndex, false /* render */);
970                 if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
971                     Log.d(TAG, "saw output EOS.");
972                     sawOutputEOS = true;
973                 }
974             } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
975                 mDecOutputFormat = codec.getOutputFormat();
976                 Log.d(TAG, "output format has changed to " + mDecOutputFormat);
977                 int colorFormat = mDecOutputFormat.getInteger(MediaFormat.KEY_COLOR_FORMAT);
978                 if (colorFormat == CodecCapabilities.COLOR_FormatYUV420SemiPlanar
979                         || colorFormat == CodecCapabilities.COLOR_FormatYUV420Planar) {
980                     mDstColorFormat = colorFormat;
981                 } else {
982                     mDstColorFormat = CodecCapabilities.COLOR_FormatYUV420Flexible;
983                     Log.w(TAG, "output format changed to unsupported one " +
984                             Integer.toHexString(colorFormat) + ", using FlexYUV");
985                 }
986                 mVideoStride = mDecOutputFormat.containsKey(MediaFormat.KEY_STRIDE)
987                         ? mDecOutputFormat.getInteger(MediaFormat.KEY_STRIDE)
988                         : mDecOutputFormat.getInteger(MediaFormat.KEY_WIDTH);
989                 mVideoVStride = mDecOutputFormat.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
990                         ? mDecOutputFormat.getInteger(MediaFormat.KEY_SLICE_HEIGHT)
991                         : mDecOutputFormat.getInteger(MediaFormat.KEY_HEIGHT);
992             }
993         }
994         long finish = System.currentTimeMillis();
995         int validDataNum = Math.min(outFrameCount - 1,
996                 mDecoderFrameTimeUsDiff[mCurrentTestRound].length);
997         mDecoderFrameTimeUsDiff[mCurrentTestRound] =
998                 Arrays.copyOf(mDecoderFrameTimeUsDiff[mCurrentTestRound], validDataNum);
999         codec.stop();
1000         codec.release();
1001         codec = null;
1002 
1003         // divide by 3 as sum is done for Y, U, V.
1004         double errorRms = Math.sqrt(totalErrorSquared / PIXEL_CHECK_PER_FRAME / outFrameCount / 3);
1005         return new RunResult(outFrameCount, (finish - start) / 1000., errorRms);
1006     }
1007 
1008     /**
1009      *  returns origin in the absolute frame for given frame count.
1010      *  The video scene is moving by moving origin per each frame.
1011      */
getOrigin(int frameCount, int runId)1012     private Point getOrigin(int frameCount, int runId) {
1013         // Translation is basically:
1014         //    x = A * sin(B * t) + C * t
1015         //    y = D * cos(E * t) + F * t
1016         //    'bouncing' in a [0, length] regions (constrained to [0, length] by mirroring at 0
1017         //    and length.)
1018         double x = (1 - Math.sin(frameCount / (7. + (runId % 2)))) * 0.1 + frameCount * 0.005;
1019         double y = (1 - Math.cos(frameCount / (10. + (runId & ~1))))
1020                 + frameCount * (0.01 + runId / 1000.);
1021 
1022         // At every 32nd or 13th frame out of 32, an additional varying offset is added to
1023         // produce a jerk.
1024         if (frameCount % 32 == 0) {
1025             x += ((frameCount % 64) / 32) + 0.3 + y;
1026         }
1027         if (frameCount % 32 == 13) {
1028             y += ((frameCount % 64) / 32) + 0.6 + x;
1029         }
1030 
1031         // constrain to region
1032         int xi = (int)((x % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
1033         int yi = (int)((y % 2) * YUV_PLANE_ADDITIONAL_LENGTH);
1034         if (xi > YUV_PLANE_ADDITIONAL_LENGTH) {
1035             xi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - xi;
1036         }
1037         if (yi > YUV_PLANE_ADDITIONAL_LENGTH) {
1038             yi = 2 * YUV_PLANE_ADDITIONAL_LENGTH - yi;
1039         }
1040         return new Point(xi, yi);
1041     }
1042 
1043     /**
1044      * initialize reference YUV plane
1045      * @param w This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
1046      *          to allow movements
1047      * @param h This should be YUV_PLANE_ADDITIONAL_LENGTH pixels bigger than video resolution
1048      *          to allow movements
1049      * @param semiPlanarEnc
1050      * @param semiPlanarDec
1051      */
initYUVPlane(int w, int h)1052     private void initYUVPlane(int w, int h) {
1053         int bufferSizeY = w * h;
1054         mYBuffer = ByteBuffer.allocate(bufferSizeY);
1055         mUVBuffer = ByteBuffer.allocate(bufferSizeY / 2);
1056         mYDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY);
1057         mUVDirectBuffer = ByteBuffer.allocateDirect(bufferSizeY / 2);
1058         mBufferWidth = w;
1059         mBufferHeight = h;
1060         final byte[] yArray = mYBuffer.array();
1061         final byte[] uvArray = mUVBuffer.array();
1062         for (int i = 0; i < h; i++) {
1063             for (int j = 0; j < w; j++) {
1064                 yArray[i * w + j]  = clampY((i + j) & 0xff);
1065             }
1066         }
1067         if (isSrcSemiPlanar()) {
1068             for (int i = 0; i < h/2; i++) {
1069                 for (int j = 0; j < w/2; j++) {
1070                     uvArray[i * w + 2 * j]  = (byte) (i & 0xff);
1071                     uvArray[i * w + 2 * j + 1]  = (byte) (j & 0xff);
1072                 }
1073             }
1074         } else { // planar, U first, then V
1075             int vOffset = bufferSizeY / 4;
1076             for (int i = 0; i < h/2; i++) {
1077                 for (int j = 0; j < w/2; j++) {
1078                     uvArray[i * w/2 + j]  = (byte) (i & 0xff);
1079                     uvArray[i * w/2 + vOffset + j]  = (byte) (j & 0xff);
1080                 }
1081             }
1082         }
1083         mYDirectBuffer.put(yArray);
1084         mUVDirectBuffer.put(uvArray);
1085         mYDirectBuffer.rewind();
1086         mUVDirectBuffer.rewind();
1087     }
1088 
1089     /**
1090      * class to store pixel values in YUV
1091      *
1092      */
1093     public class YUVValue {
1094         public byte mY;
1095         public byte mU;
1096         public byte mV;
YUVValue()1097         public YUVValue() {
1098         }
1099 
equalTo(YUVValue other)1100         public boolean equalTo(YUVValue other) {
1101             return (mY == other.mY) && (mU == other.mU) && (mV == other.mV);
1102         }
1103 
calcErrorSquared(YUVValue other)1104         public double calcErrorSquared(YUVValue other) {
1105             // Java's byte is signed but here we want to calculate difference in unsigned bytes.
1106             double yDelta = (mY & 0xFF) - (other.mY & 0xFF);
1107             double uDelta = (mU & 0xFF) - (other.mU & 0xFF);
1108             double vDelta = (mV & 0xFF) - (other.mV & 0xFF);
1109             return yDelta * yDelta + uDelta * uDelta + vDelta * vDelta;
1110         }
1111     }
1112 
1113     /**
1114      * Read YUV values from given position (x,y) for given origin (originX, originY)
1115      * The whole data is already available from YBuffer and UVBuffer.
1116      * @param result pass the result via this. This is for avoiding creating / destroying too many
1117      *               instances
1118      */
getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y, YUVValue result)1119     private void getPixelValuesFromYUVBuffers(int originX, int originY, int x, int y,
1120             YUVValue result) {
1121         result.mY = mYBuffer.get((originY + y) * mBufferWidth + (originX + x));
1122         if (isSrcSemiPlanar()) {
1123             int index = (originY + y) / 2 * mBufferWidth + (originX + x) / 2 * 2;
1124             //Log.d(TAG, "YUV " + originX + "," + originY + "," + x + "," + y + "," + index);
1125             result.mU = mUVBuffer.get(index);
1126             result.mV = mUVBuffer.get(index + 1);
1127         } else {
1128             int vOffset = mBufferWidth * mBufferHeight / 4;
1129             int index = (originY + y) / 2 * mBufferWidth / 2 + (originX + x) / 2;
1130             result.mU = mUVBuffer.get(index);
1131             result.mV = mUVBuffer.get(vOffset + index);
1132         }
1133     }
1134 
1135     /**
1136      * Read YUV pixels from decoded output buffer for give (x, y) position
1137      * Output buffer is composed of Y parts followed by U/V
1138      * @param result pass the result via this. This is for avoiding creating / destroying too many
1139      *               instances
1140      */
getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result)1141     private void getPixelValuesFromOutputBuffer(ByteBuffer buffer, int x, int y, YUVValue result) {
1142         result.mY = buffer.get(y * mVideoStride + x);
1143         if (isDstSemiPlanar()) {
1144             int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride + x / 2 * 2;
1145             //Log.d(TAG, "Decoded " + x + "," + y + "," + index);
1146             result.mU = buffer.get(index);
1147             result.mV = buffer.get(index + 1);
1148         } else {
1149             int vOffset = mVideoStride * mVideoVStride / 4;
1150             int index = mVideoStride * mVideoVStride + y / 2 * mVideoStride / 2 + x / 2;
1151             result.mU = buffer.get(index);
1152             result.mV = buffer.get(index + vOffset);
1153         }
1154     }
1155 
getPixelValuesFromImage(Image image, int x, int y, YUVValue result)1156     private void getPixelValuesFromImage(Image image, int x, int y, YUVValue result) {
1157         assertTrue(image.getFormat() == ImageFormat.YUV_420_888);
1158 
1159         Plane[] planes = image.getPlanes();
1160         assertTrue(planes.length == 3);
1161 
1162         result.mY = getPixelFromPlane(planes[0], x, y);
1163         result.mU = getPixelFromPlane(planes[1], x / 2, y / 2);
1164         result.mV = getPixelFromPlane(planes[2], x / 2, y / 2);
1165     }
1166 
getPixelFromPlane(Plane plane, int x, int y)1167     private byte getPixelFromPlane(Plane plane, int x, int y) {
1168         ByteBuffer buf = plane.getBuffer();
1169         return buf.get(y * plane.getRowStride() + x * plane.getPixelStride());
1170     }
1171 
1172     /**
1173      * Y cannot have full range. clamp it to prevent invalid value.
1174      */
clampY(int y)1175     private byte clampY(int y) {
1176         if (y < Y_CLAMP_MIN) {
1177             y = Y_CLAMP_MIN;
1178         } else if (y > Y_CLAMP_MAX) {
1179             y = Y_CLAMP_MAX;
1180         }
1181         return (byte) (y & 0xff);
1182     }
1183 
1184     // for debugging
printByteArray(String msg, byte[] data, int offset, int len)1185     private void printByteArray(String msg, byte[] data, int offset, int len) {
1186         StringBuilder builder = new StringBuilder();
1187         builder.append(msg);
1188         builder.append(":");
1189         for (int i = offset; i < offset + len; i++) {
1190             builder.append(Integer.toHexString(data[i]));
1191             builder.append(",");
1192         }
1193         builder.deleteCharAt(builder.length() - 1);
1194         Log.i(TAG, builder.toString());
1195     }
1196 
1197     // for debugging
printByteBuffer(String msg, ByteBuffer data, int offset, int len)1198     private void printByteBuffer(String msg, ByteBuffer data, int offset, int len) {
1199         StringBuilder builder = new StringBuilder();
1200         builder.append(msg);
1201         builder.append(":");
1202         for (int i = offset; i < offset + len; i++) {
1203             builder.append(Integer.toHexString(data.get(i)));
1204             builder.append(",");
1205         }
1206         builder.deleteCharAt(builder.length() - 1);
1207         Log.i(TAG, builder.toString());
1208     }
1209 
1210     /**
1211      * Generates the presentation time for frame N, in microseconds.
1212      */
computePresentationTime(int frameIndex)1213     private long computePresentationTime(int frameIndex) {
1214         return TIMESTAMP_OFFSET + frameIndex * 1000000L / mFrameRate;
1215     }
1216 
1217     /**
1218      * Generates the frameIndex from presentation time
1219      */
computeFrameIndex(long ptsUsec)1220     private int computeFrameIndex(long ptsUsec) {
1221         assertTrue("value for PtsUsec too low: " + ptsUsec, ptsUsec >= TIMESTAMP_OFFSET);
1222         return (int) ((ptsUsec - TIMESTAMP_OFFSET) * mFrameRate / 1000000.0 + 0.5);
1223     }
1224 
1225 }
1226