1 /*
2 * Copyright (C) 2023 The Android Open Source Project
3 *
4 * Licensed under the Apache License, Version 2.0 (the "License");
5 * you may not use this file except in compliance with the License.
6 * You may obtain a copy of the License at
7 *
8 * http://www.apache.org/licenses/LICENSE-2.0
9 *
10 * Unless required by applicable law or agreed to in writing, software
11 * distributed under the License is distributed on an "AS IS" BASIS,
12 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 * See the License for the specific language governing permissions and
14 * limitations under the License.
15 */
16 //#define LOG_NDEBUG 0
17 #define LOG_TAG "GraphicsTracker_test"
39
40 using ::aidl::android::hardware::media::c2::implementation::GraphicsTracker;
41 using ::android::BufferItem;
42 using ::android::BufferQueue;
43 using ::android::Fence;
44 using ::android::GraphicBuffer;
45 using ::android::IGraphicBufferProducer;
46 using ::android::IGraphicBufferConsumer;
47 using ::android::IProducerListener;
48 using ::android::IConsumerListener;
49 using ::android::OK;
50 using ::android::sp;
51 using ::android::wp;
52
53 namespace {
54 struct BqStatistics {
55 std::atomic<int> mDequeued;
56 std::atomic<int> mQueued;
57 std::atomic<int> mBlocked;
58 std::atomic<int> mDropped;
59 std::atomic<int> mDiscarded;
60 std::atomic<int> mReleased;
61
log__anon432262d10111::BqStatistics62 void log() {
63 ALOGD("Dequeued: %d, Queued: %d, Blocked: %d, "
64 "Dropped: %d, Discarded %d, Released %d",
65 (int)mDequeued, (int)mQueued, (int)mBlocked,
66 (int)mDropped, (int)mDiscarded, (int)mReleased);
67 }
68
clear__anon432262d10111::BqStatistics69 void clear() {
70 mDequeued = 0;
71 mQueued = 0;
72 mBlocked = 0;
73 mDropped = 0;
74 mDiscarded = 0;
75 mReleased = 0;
76 }
77 };
78
79 struct DummyConsumerListener : public android::BnConsumerListener {
onFrameAvailable__anon432262d10111::DummyConsumerListener80 void onFrameAvailable(const BufferItem& /* item */) override {}
onBuffersReleased__anon432262d10111::DummyConsumerListener81 void onBuffersReleased() override {}
onSidebandStreamChanged__anon432262d10111::DummyConsumerListener82 void onSidebandStreamChanged() override {}
83 };
84
85 struct TestConsumerListener : public android::BnConsumerListener {
TestConsumerListener__anon432262d10111::TestConsumerListener86 TestConsumerListener(const sp<IGraphicBufferConsumer> &consumer)
87 : BnConsumerListener(), mConsumer(consumer) {}
onFrameAvailable__anon432262d10111::TestConsumerListener88 void onFrameAvailable(const BufferItem&) override {
89 constexpr static int kRenderDelayUs = 1000000/30; // 30fps
90 BufferItem buffer;
91 // consume buffer
92 sp<IGraphicBufferConsumer> consumer = mConsumer.promote();
93 if (consumer != nullptr && consumer->acquireBuffer(&buffer, 0) == android::NO_ERROR) {
94 ::usleep(kRenderDelayUs);
95 consumer->releaseBuffer(buffer.mSlot, buffer.mFrameNumber, buffer.mFence);
96 }
97 }
onBuffersReleased__anon432262d10111::TestConsumerListener98 void onBuffersReleased() override {}
onSidebandStreamChanged__anon432262d10111::TestConsumerListener99 void onSidebandStreamChanged() override {}
100
101 wp<IGraphicBufferConsumer> mConsumer;
102 };
103
104 struct TestProducerListener : public android::BnProducerListener {
TestProducerListener__anon432262d10111::TestProducerListener105 TestProducerListener(std::shared_ptr<GraphicsTracker> tracker,
106 std::shared_ptr<BqStatistics> &stat,
107 uint32_t generation) : BnProducerListener(),
108 mTracker(tracker), mStat(stat), mGeneration(generation) {}
onBufferReleased__anon432262d10111::TestProducerListener109 virtual void onBufferReleased() override {
110 auto tracker = mTracker.lock();
111 if (tracker) {
112 mStat->mReleased++;
113 tracker->onReleased(mGeneration);
114 }
115 }
needsReleaseNotify__anon432262d10111::TestProducerListener116 virtual bool needsReleaseNotify() override { return true; }
onBuffersDiscarded__anon432262d10111::TestProducerListener117 virtual void onBuffersDiscarded(const std::vector<int32_t>&) override {}
118
119 std::weak_ptr<GraphicsTracker> mTracker;
120 std::shared_ptr<BqStatistics> mStat;
121 uint32_t mGeneration;
122 };
123
124 struct Frame {
125 AHardwareBuffer *buffer_;
126 sp<Fence> fence_;
127
Frame__anon432262d10111::Frame128 Frame() : buffer_{nullptr}, fence_{nullptr} {}
Frame__anon432262d10111::Frame129 Frame(AHardwareBuffer *buffer, sp<Fence> fence)
130 : buffer_(buffer), fence_(fence) {}
~Frame__anon432262d10111::Frame131 ~Frame() {
132 if (buffer_) {
133 AHardwareBuffer_release(buffer_);
134 }
135 }
136 };
137
138 struct FrameQueue {
139 bool mStopped;
140 bool mDrain;
141 std::queue<std::shared_ptr<Frame>> mQueue;
142 std::mutex mMutex;
143 std::condition_variable mCond;
144
FrameQueue__anon432262d10111::FrameQueue145 FrameQueue() : mStopped{false}, mDrain{false} {}
146
queueItem__anon432262d10111::FrameQueue147 bool queueItem(AHardwareBuffer *buffer, sp<Fence> fence) {
148 std::shared_ptr<Frame> frame = std::make_shared<Frame>(buffer, fence);
149 if (mStopped) {
150 return false;
151 }
152 if (!frame) {
153 return false;
154 }
155 std::unique_lock<std::mutex> l(mMutex);
156 mQueue.emplace(frame);
157 l.unlock();
158 mCond.notify_all();
159 return true;
160 }
161
stop__anon432262d10111::FrameQueue162 void stop(bool drain = false) {
163 bool stopped = false;
164 {
165 std::unique_lock<std::mutex> l(mMutex);
166 if (!mStopped) {
167 mStopped = true;
168 mDrain = drain;
169 stopped = true;
170 }
171 l.unlock();
172 if (stopped) {
173 mCond.notify_all();
174 }
175 }
176 }
177
waitItem__anon432262d10111::FrameQueue178 bool waitItem(std::shared_ptr<Frame> *frame) {
179 while(true) {
180 std::unique_lock<std::mutex> l(mMutex);
181 if (!mDrain && mStopped) {
182 // stop without consuming the queue.
183 return false;
184 }
185 if (!mQueue.empty()) {
186 *frame = mQueue.front();
187 mQueue.pop();
188 return true;
189 } else if (mStopped) {
190 // stop after consuming the queue.
191 return false;
192 }
193 mCond.wait(l);
194 }
195 }
196 };
197
198 } // namespace anonymous
199
// Test fixture for GraphicsTracker: owns the tracker under test plus a
// locally created BufferQueue (producer/consumer pair) standing in for
// the output Surface.
class GraphicsTrackerTest : public ::testing::Test {
public:
    const uint64_t kTestUsageFlag = GRALLOC_USAGE_SW_WRITE_OFTEN;

    // Render-thread body: pops frames from |queue|, waits on each frame's
    // fence, wraps the buffer in a C2GraphicBlock and submits it through
    // GraphicsTracker::render(). Frames that fail any step are
    // deallocated from the tracker and counted as discarded. Returns when
    // waitItem() reports stop (and, when draining, the queue is empty).
    void queueBuffer(FrameQueue *queue) {
        while (true) {
            std::shared_ptr<Frame> frame;
            if (!queue->waitItem(&frame)) {
                break;
            }
            uint64_t bid;
            // AHardwareBuffer_getId() is only available on Android T+.
            if (__builtin_available(android __ANDROID_API_T__, *)) {
                if (AHardwareBuffer_getId(frame->buffer_, &bid) !=
                        android::NO_ERROR) {
                    break;
                }
            } else {
                break;
            }
            // Wait indefinitely for the buffer's fence before queueing.
            android::status_t ret = frame->fence_->wait(-1);
            if (ret != android::NO_ERROR) {
                mTracker->deallocate(bid, frame->fence_);
                mBqStat->mDiscarded++;
                continue;
            }

            std::shared_ptr<C2GraphicBlock> blk =
                    _C2BlockFactory::CreateGraphicBlock(frame->buffer_);
            if (!blk) {
                mTracker->deallocate(bid, Fence::NO_FENCE);
                mBqStat->mDiscarded++;
                continue;
            }
            // Minimal 1x1 queue input; tests only care about buffer flow,
            // not pixel content.
            IGraphicBufferProducer::QueueBufferInput input(
                    0, false,
                    HAL_DATASPACE_UNKNOWN, android::Rect(0, 0, 1, 1),
                    NATIVE_WINDOW_SCALING_MODE_FREEZE, 0, Fence::NO_FENCE);
            IGraphicBufferProducer::QueueBufferOutput output{};
            c2_status_t res = mTracker->render(
                    blk->share(C2Rect(1, 1), C2Fence()),
                    input, &output);
            if (res != C2_OK) {
                mTracker->deallocate(bid, Fence::NO_FENCE);
                mBqStat->mDiscarded++;
                continue;
            }
            // bufferReplaced set means a previously queued, undrained
            // buffer was dropped in favor of this one.
            if (output.bufferReplaced) {
                mBqStat->mDropped++;
            }
            mBqStat->mQueued++;
        }
    }

    // Watchdog-thread helper: stops the tracker after |us| microseconds.
    void stopTrackerAfterUs(int us) {
        ::usleep(us);
        mTracker->stop();
    }

protected:
    // Create the tracker under test and a fresh BufferQueue pair.
    // Returns false if any piece could not be created.
    bool init(int maxDequeueCount) {
        mTracker = GraphicsTracker::CreateGraphicsTracker(maxDequeueCount);
        if (!mTracker) {
            return false;
        }
        BufferQueue::createBufferQueue(&mProducer, &mConsumer);
        if (!mProducer || !mConsumer) {
            return false;
        }
        return true;
    }
    // Connect both ends of the BufferQueue. setDequeueTimeout(0) makes
    // dequeueBuffer() fail immediately instead of blocking when no slot
    // is free — the tests rely on this to observe C2_BLOCKING.
    bool configure(sp<IProducerListener> producerListener,
                   sp<IConsumerListener> consumerListener,
                   int maxAcquiredCount = 1, bool controlledByApp = true) {
        if (mConsumer->consumerConnect(
                consumerListener, controlledByApp) != ::android::NO_ERROR) {
            return false;
        }
        if (mConsumer->setMaxAcquiredBufferCount(maxAcquiredCount) != ::android::NO_ERROR) {
            return false;
        }
        IGraphicBufferProducer::QueueBufferOutput qbo{};
        if (mProducer->connect(producerListener,
                               NATIVE_WINDOW_API_MEDIA, true, &qbo) != ::android::NO_ERROR) {
            return false;
        }
        if (mProducer->setDequeueTimeout(0) != ::android::NO_ERROR) {
            return false;
        }
        return true;
    }

    // Log and reset per-test statistics, then tear down the tracker
    // before disconnecting the producer so no buffer operation reaches a
    // dead Surface.
    virtual void TearDown() override {
        mBqStat->log();
        mBqStat->clear();

        if (mTracker) {
            mTracker->stop();
            mTracker.reset();
        }
        if (mProducer) {
            mProducer->disconnect(NATIVE_WINDOW_API_MEDIA);
        }
        mProducer.clear();
        mConsumer.clear();
    }

protected:
    std::shared_ptr<BqStatistics> mBqStat = std::make_shared<BqStatistics>();
    sp<IGraphicBufferProducer> mProducer;
    sp<IGraphicBufferConsumer> mConsumer;
    std::shared_ptr<GraphicsTracker> mTracker;
};
312
313
// Exhaust every dequeueable slot and verify that the next allocate()
// call reports C2_BLOCKING with zero slots remaining.
TEST_F(GraphicsTrackerTest, AllocateAndBlockedTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    AHardwareBuffer *ahwb;
    sp<Fence> fence;
    uint64_t bufferId;

    // Drain the dequeueable slots one by one, checking the count after each.
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int n = 1; n <= maxDequeueCount; ++n) {
            c2_status_t status = mTracker->allocate(0, 0, 0, kTestUsageFlag, &ahwb, &fence);
            ASSERT_EQ(C2_OK, status);
            mBqStat->mDequeued++;
            ASSERT_EQ(maxDequeueCount - n, mTracker->getCurDequeueable());
            ASSERT_EQ(OK, AHardwareBuffer_getId(ahwb, &bufferId));
            ALOGD("alloced : bufferId: %llu", (unsigned long long)bufferId);
            AHardwareBuffer_release(ahwb);
        }
    } else {
        GTEST_SKIP();
    }

    // With every slot in use, the next allocation must be refused.
    c2_status_t status = mTracker->allocate(0, 0, 0, kTestUsageFlag, &ahwb, &fence);
    ALOGD("alloc : err(%d, %d)", status, C2_BLOCKING);
    ASSERT_EQ(C2_BLOCKING, status);
    mBqStat->mBlocked++;
    ASSERT_EQ(0, mTracker->getCurDequeueable());
}
353
// Exhaust every slot, then deallocate the buffers one by one and verify
// the dequeueable count grows back by exactly one per deallocation.
TEST_F(GraphicsTrackerTest, AllocateAndDeallocateTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    AHardwareBuffer *ahwb;
    sp<Fence> fence;
    std::vector<uint64_t> bufferIds;

    // Drain all slots, remembering each buffer id for deallocation below.
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int n = 0; n < maxDequeueCount; ++n) {
            ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &ahwb, &fence));
            mBqStat->mDequeued++;
            uint64_t bufferId;
            ASSERT_EQ(OK, AHardwareBuffer_getId(ahwb, &bufferId));
            bufferIds.push_back(bufferId);
            ALOGD("alloced : bufferId: %llu", (unsigned long long)bufferId);
            AHardwareBuffer_release(ahwb);
        }
    } else {
        GTEST_SKIP();
    }

    // Each deallocation should free exactly one dequeueable slot.
    for (int n = 0; n < maxDequeueCount; ++n) {
        ALOGD("dealloc : bufferId: %llu", (unsigned long long)bufferIds[n]);
        ASSERT_EQ(C2_OK, mTracker->deallocate(bufferIds[n], Fence::NO_FENCE));
        ASSERT_EQ(n + 1, mTracker->getCurDequeueable());
        mBqStat->mDiscarded++;
    }
}
396
// Queue two buffers back-to-back with no consumer draining them: the
// first queued buffer should be dropped (replaced) by the second, and
// after consuming the survivor all dequeueable slots should come back.
TEST_F(GraphicsTrackerTest, DropAndReleaseTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new DummyConsumerListener()));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    c2_status_t ret = mTracker->configureGraphics(mProducer, generation);
    ASSERT_EQ(C2_OK, ret);

    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
    AHardwareBuffer *buf1, *buf2;
    sp<Fence> fence1, fence2;

    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence1);
    ASSERT_EQ(C2_OK, ret);
    mBqStat->mDequeued++;
    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());

    ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence2);
    ASSERT_EQ(C2_OK, ret);
    mBqStat->mDequeued++;
    ASSERT_EQ(maxDequeueCount - 2, mTracker->getCurDequeueable());

    // Queue two buffers without consuming, one should be dropped
    ASSERT_TRUE(frameQueue.queueItem(buf1, fence1));
    ASSERT_TRUE(frameQueue.queueItem(buf2, fence2));

    // Let the render thread drain the queue, then wait for it to finish.
    frameQueue.stop(true);
    if (queueThread.joinable()) {
        queueThread.join();
    }

    // Dropping buf1 should already have freed one slot.
    ASSERT_EQ(maxDequeueCount - 1, mTracker->getCurDequeueable());

    // Consume one buffer and release
    BufferItem item;
    ASSERT_EQ(OK, mConsumer->acquireBuffer(&item, 0));
    ASSERT_EQ(OK, mConsumer->releaseBuffer(item.mSlot, item.mFrameNumber, item.mFence));
    // Nothing to consume
    ASSERT_NE(OK, mConsumer->acquireBuffer(&item, 0));

    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());
    ASSERT_EQ(1, mBqStat->mReleased);
    ASSERT_EQ(1, mBqStat->mDropped);
}
448
// End-to-end render test: allocate and render maxNumAlloc frames against
// a consumer draining at ~30fps. When allocation blocks, wait on the
// tracker's waitable fd until a slot frees up, then retry. Finally check
// that every dequeued buffer was queued and came back via release/drop.
TEST_F(GraphicsTrackerTest, RenderTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;
    const int maxNumAlloc = 20;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    // The waitable fd signals when a dequeueable slot becomes available.
    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);


    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);

    int numAlloc = 0;

    while (numAlloc < maxNumAlloc) {
        AHardwareBuffer *buf;
        sp<Fence> fence;
        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            // Wait up to 3s for a slot, then retry the allocation.
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        // Hand the frame to the render thread; it takes buffer ownership.
        if (!frameQueue.queueItem(buf, fence)) {
            ALOGE("queue to render failed");
            break;
        }
        ++numAlloc;
    }

    frameQueue.stop(true);
    // Wait more than enough time(1 sec) to render all queued frames for sure.
    ::usleep(1000000);

    if (queueThread.joinable()) {
        queueThread.join();
    }
    ASSERT_EQ(numAlloc, maxNumAlloc);
    ASSERT_EQ(numAlloc, mBqStat->mDequeued);
    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mQueued);
    ASSERT_EQ(mBqStat->mDequeued, mBqStat->mReleased + mBqStat->mDropped);
}
510
// With every slot exhausted, waiting on the tracker's waitable fd should
// time out; once a watchdog thread stops the tracker, the wait must
// abort with C2_BAD_STATE instead of timing out.
TEST_F(GraphicsTrackerTest, StopAndWaitTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 2;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf1, *buf2;
    sp<Fence> fence;

    // Use up both dequeueable slots.
    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf1, &fence));
    mBqStat->mDequeued++;
    AHardwareBuffer_release(buf1);

    ASSERT_EQ(C2_OK, mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf2, &fence));
    mBqStat->mDequeued++;
    AHardwareBuffer_release(buf2);

    // No slot left: a 3s wait should time out.
    ASSERT_EQ(0, mTracker->getCurDequeueable());
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(3000000000));

    // Stopping the tracker after 500ms must wake the waiter with
    // C2_BAD_STATE well before the 3s timeout.
    std::thread stopThread(&GraphicsTrackerTest::stopTrackerAfterUs, this, 500000);
    ASSERT_EQ(C2_BAD_STATE, waitFence.wait(3000000000));

    if (stopThread.joinable()) {
        stopThread.join();
    }
}
549
// Render across a Surface switch: allocate 12 buffers, render the first
// 8 on the original Surface, then swap in a new BufferQueue with a bumped
// generation, render the 4 leftovers plus a second batch on the new
// Surface, and verify all 20 buffers were queued and all slots recovered.
TEST_F(GraphicsTrackerTest, SurfaceChangeTest) {
    uint32_t generation = 1;
    const int maxDequeueCount = 10;

    const int maxNumAlloc = 20;

    const int firstPassAlloc = 12;
    const int firstPassRender = 8;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));

    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *bufs[maxNumAlloc];
    sp<Fence> fences[maxNumAlloc];

    FrameQueue frameQueue;
    std::thread queueThread(&GraphicsTrackerTest::queueBuffer, this, &frameQueue);
    int numAlloc = 0;

    // First pass part 1: allocate and immediately render 8 buffers.
    for (int i = 0; i < firstPassRender; ++i) {
        ASSERT_EQ(C2_OK, mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &bufs[i], &fences[i]));
        mBqStat->mDequeued++;
        numAlloc++;
        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
    }

    // First pass part 2: allocate 4 more, kept un-rendered until after
    // the Surface switch. Retry on C2_BLOCKING via the waitable fd.
    while (numAlloc < firstPassAlloc) {
        c2_status_t ret = mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &bufs[numAlloc], &fences[numAlloc]);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(numAlloc, firstPassAlloc);

    // switching surface
    sp<IGraphicBufferProducer> oldProducer = mProducer;
    sp<IGraphicBufferConsumer> oldConsumer = mConsumer;
    mProducer.clear();
    mConsumer.clear();
    BufferQueue::createBufferQueue(&mProducer, &mConsumer);
    ASSERT_TRUE((bool)mProducer && (bool)mConsumer);

    // New Surface gets a new generation number so stale buffers are
    // distinguishable from fresh ones.
    generation += 1;

    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));
    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));

    // Disconnect the old Surface only after the new one is configured.
    ASSERT_EQ(OK, oldProducer->disconnect(NATIVE_WINDOW_API_MEDIA));
    oldProducer.clear();
    oldConsumer.clear();

    // Render the 4 buffers allocated from the previous Surface.
    for (int i = firstPassRender ; i < firstPassAlloc; ++i) {
        ASSERT_EQ(true, frameQueue.queueItem(bufs[i], fences[i]));
    }

    // Second pass: allocate and render the remaining 8 on the new Surface.
    while (numAlloc < maxNumAlloc) {
        AHardwareBuffer *buf;
        sp<Fence> fence;
        c2_status_t ret = mTracker->allocate(0, 0, 0, kTestUsageFlag, &buf, &fence);
        if (ret == C2_BLOCKING) {
            mBqStat->mBlocked++;
            c2_status_t waitRes = waitFence.wait(3000000000);
            if (waitRes == C2_TIMED_OUT || waitRes == C2_OK) {
                continue;
            }
            ALOGE("alloc wait failed: c2_err(%d)", waitRes);
            break;
        }
        if (ret != C2_OK) {
            ALOGE("alloc error: c2_err(%d)", ret);
            break;
        }
        mBqStat->mDequeued++;
        if (!frameQueue.queueItem(buf, fence)) {
            ALOGE("queue to render failed");
            break;
        }
        ++numAlloc;
    }

    ASSERT_EQ(numAlloc, maxNumAlloc);

    frameQueue.stop(true);
    // Wait more than enough time(1 sec) to render all queued frames for sure.
    ::usleep(1000000);

    if (queueThread.joinable()) {
        queueThread.join();
    }
    // mReleased should not be checked. IProducerListener::onBufferReleased()
    // from the previous Surface could be missing after a new Surface was
    // configured. Instead check # of dequeueable and queueBuffer() calls.
    ASSERT_EQ(numAlloc, mBqStat->mQueued);
    ASSERT_EQ(maxDequeueCount, mTracker->getCurDequeueable());

    // All slots recovered: we can exhaust them again on the new Surface.
    for (int i = 0; i < maxDequeueCount; ++i) {
        AHardwareBuffer *buf;
        sp<Fence> fence;

        ASSERT_EQ(C2_OK, mTracker->allocate(
                0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate(
            0, 0, 0, kTestUsageFlag, &bufs[0], &fences[0]));
}
685
// Verify configureMaxDequeueCount() with an increased limit: once the
// original limit is exhausted, raising the limit by N plus freeing one
// slot via deallocate() should allow exactly N + 1 more allocations
// before blocking again. Repeated twice to ensure it works repeatedly.
TEST_F(GraphicsTrackerTest, maxDequeueIncreaseTest) {
    uint32_t generation = 1;
    int maxDequeueCount = 10;
    int dequeueIncrease = 4;

    int numAlloc = 0;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bids[maxDequeueCount];
    // AHardwareBuffer_getId() is only available on Android T+.
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
            AHardwareBuffer_release(buf);
            mBqStat->mDequeued++;
            numAlloc++;
        }
    } else {
        GTEST_SKIP();
    }
    // Limit reached: waiting times out and allocation blocks.
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[0], Fence::NO_FENCE));
    mBqStat->mDiscarded++;

    // One freed slot + dequeueIncrease new slots => dequeueIncrease + 1
    // successful allocations before blocking again.
    maxDequeueCount += dequeueIncrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueIncrease + 1; ++i) {
        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[1], Fence::NO_FENCE));
    mBqStat->mDiscarded++;

    // Second increase: same pattern must hold again.
    maxDequeueCount += dequeueIncrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueIncrease + 1; ++i) {
        ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
        ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        AHardwareBuffer_release(buf);
        mBqStat->mDequeued++;
        numAlloc++;
    }
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
}
752
// Verify configureMaxDequeueCount() with a decreased limit: after a
// decrease, allocation stays blocked until enough outstanding buffers
// have been deallocated to bring the count under the new limit.
TEST_F(GraphicsTrackerTest, maxDequeueDecreaseTest) {
    uint32_t generation = 1;
    int maxDequeueCount = 12;
    int dequeueDecrease = 4;

    int numAlloc = 0;

    ASSERT_TRUE(init(maxDequeueCount));
    ASSERT_TRUE(configure(new TestProducerListener(mTracker, mBqStat, generation),
                          new TestConsumerListener(mConsumer), 1, false));

    ASSERT_EQ(OK, mProducer->setGenerationNumber(generation));
    ASSERT_EQ(C2_OK, mTracker->configureGraphics(mProducer, generation));

    int waitFd = -1;
    ASSERT_EQ(C2_OK, mTracker->getWaitableFd(&waitFd));
    C2Fence waitFence = _C2FenceFactory::CreatePipeFence(waitFd);

    AHardwareBuffer *buf;
    sp<Fence> fence;
    uint64_t bids[maxDequeueCount];
    // AHardwareBuffer_getId() is only available on Android T+.
    if (__builtin_available(android __ANDROID_API_T__, *)) {
        for (int i = 0; i < maxDequeueCount; ++i) {
            ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
            ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
            ASSERT_EQ(OK, AHardwareBuffer_getId(buf, &bids[i]));
            AHardwareBuffer_release(buf);
            mBqStat->mDequeued++;
            numAlloc++;
        }
    } else {
        GTEST_SKIP();
    }
    // Original limit exhausted: waiting times out, allocation blocks.
    ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
    ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));

    // After decreasing by 4, the first 4 deallocations only work off the
    // deficit (12 outstanding vs limit of 8); the 5th finally frees an
    // allocatable slot.
    int discardIdx = 0;
    maxDequeueCount -= dequeueDecrease;
    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueDecrease + 1; ++i) {
        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
        mBqStat->mDiscarded++;
    }
    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
    mBqStat->mDequeued++;

    // Deallocate two buffers up-front so the next decrease starts with a
    // smaller deficit; only dequeueDecrease - 1 further deallocations are
    // then needed before allocation succeeds again.
    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
    mBqStat->mDiscarded++;
    ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
    mBqStat->mDiscarded++;
    maxDequeueCount -= dequeueDecrease;

    ASSERT_EQ(C2_OK, mTracker->configureMaxDequeueCount(maxDequeueCount));
    for (int i = 0; i < dequeueDecrease - 1; ++i) {
        ASSERT_EQ(C2_TIMED_OUT, waitFence.wait(1000000000));
        ASSERT_EQ(C2_BLOCKING, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
        ASSERT_EQ(C2_OK, mTracker->deallocate(bids[discardIdx++], Fence::NO_FENCE));
        mBqStat->mDiscarded++;
    }
    ASSERT_EQ(C2_OK, waitFence.wait(1000000000));
    ASSERT_EQ(C2_OK, mTracker->allocate( 0, 0, 0, kTestUsageFlag, &buf, &fence));
    mBqStat->mDequeued++;
}
819