xref: /aosp_15_r20/external/v4l2_codec2/components/EncodeComponent.cpp (revision 0ec5a0ec62797f775085659156625e7f1bdb369f)
1 // Copyright 2023 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4 
5 //#define LOG_NDEBUG 0
6 #define ATRACE_TAG ATRACE_TAG_VIDEO
7 #define LOG_TAG "EncodeComponent"
8 
9 #include <v4l2_codec2/components/EncodeComponent.h>
10 
11 #include <inttypes.h>
12 
13 #include <algorithm>
14 #include <utility>
15 
16 #include <C2AllocatorGralloc.h>
17 #include <C2PlatformSupport.h>
18 #include <C2Work.h>
19 #include <android/hardware/graphics/common/1.0/types.h>
20 #include <base/bind.h>
21 #include <base/bind_helpers.h>
22 #include <log/log.h>
23 #include <media/stagefright/MediaDefs.h>
24 #include <ui/GraphicBuffer.h>
25 #include <ui/Size.h>
26 #include <utils/Trace.h>
27 
28 #include <v4l2_codec2/common/EncodeHelpers.h>
29 #include <v4l2_codec2/common/FormatConverter.h>
30 #include <v4l2_codec2/common/H264.h>
31 #include <v4l2_codec2/components/BitstreamBuffer.h>
32 #include <v4l2_codec2/components/EncodeInterface.h>
33 #include <v4l2_codec2/components/VideoEncoder.h>
34 
35 using android::hardware::graphics::common::V1_0::BufferUsage;
36 
37 namespace android {
38 
39 namespace {
40 // Create an input frame from the specified graphic block.
41 std::unique_ptr<VideoEncoder::InputFrame> createInputFrame(
42         const C2ConstGraphicBlock& block, VideoPixelFormat format,
43         const std::vector<VideoFramePlane>& planes, uint64_t index, int64_t timestamp) {
44     std::vector<int> fds;
45     const C2Handle* const handle = block.handle();
46     for (int i = 0; i < handle->numFds; i++) {
47         fds.emplace_back(handle->data[i]);
48     }
49 
50     return std::make_unique<VideoEncoder::InputFrame>(std::move(fds), planes, format, index,
51                                                       timestamp);
52 }
53 }  // namespace
54 
55 // Get the video frame layout from the specified |block|.
56 // TODO(dstaessens): Clean up code extracting layout from a C2GraphicBlock.
57 std::optional<std::vector<VideoFramePlane>> getVideoFrameLayout(const C2ConstGraphicBlock& block,
58                                                                 VideoPixelFormat* format) {
59     ALOGV("%s()", __func__);
60 
61     // Get the C2PlanarLayout from the graphics block. The C2GraphicView returned by block.map()
62     // needs to be released before calling getGraphicBlockInfo(), or the lockYCbCr() call will block
63     // indefinitely.
64     C2PlanarLayout layout = block.map().get().layout();
65 
66     // The above layout() call cannot fill in the layout information and instead memsets it to 0 if
67     // the input format is IMPLEMENTATION_DEFINED and its backing format is RGB. In that case we fill
68     // in the layout using ImplDefinedToRGBXMap.
69     if (layout.type == C2PlanarLayout::TYPE_UNKNOWN) {
70         std::unique_ptr<ImplDefinedToRGBXMap> idMap = ImplDefinedToRGBXMap::create(block);
71         if (idMap == nullptr) {
72             ALOGE("Unable to parse RGBX_8888 from IMPLEMENTATION_DEFINED");
73             return std::nullopt;
74         }
75         layout.type = C2PlanarLayout::TYPE_RGB;
76         // These parameters are used in the TYPE_RGB case below.
77         layout.numPlanes = 3;   // same value as in C2AllocationGralloc::map()
78         layout.rootPlanes = 1;  // same value as in C2AllocationGralloc::map()
79         layout.planes[C2PlanarLayout::PLANE_R].offset = idMap->offset();
80         layout.planes[C2PlanarLayout::PLANE_R].rowInc = idMap->rowInc();
81     }
82 
83     std::vector<uint32_t> offsets(layout.numPlanes, 0u);
84     std::vector<uint32_t> strides(layout.numPlanes, 0u);
85     switch (layout.type) {
86     case C2PlanarLayout::TYPE_YUV: {
87         android_ycbcr ycbcr = getGraphicBlockInfo(block);
88         offsets[C2PlanarLayout::PLANE_Y] =
89                 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.y));
90         offsets[C2PlanarLayout::PLANE_U] =
91                 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cb));
92         offsets[C2PlanarLayout::PLANE_V] =
93                 static_cast<uint32_t>(reinterpret_cast<uintptr_t>(ycbcr.cr));
94         strides[C2PlanarLayout::PLANE_Y] = static_cast<uint32_t>(ycbcr.ystride);
95         strides[C2PlanarLayout::PLANE_U] = static_cast<uint32_t>(ycbcr.cstride);
96         strides[C2PlanarLayout::PLANE_V] = static_cast<uint32_t>(ycbcr.cstride);
97 
98         bool crcb = false;
99         if (offsets[C2PlanarLayout::PLANE_U] > offsets[C2PlanarLayout::PLANE_V]) {
100             // Swap offsets, no need to swap strides as they are identical for both chroma planes.
101             std::swap(offsets[C2PlanarLayout::PLANE_U], offsets[C2PlanarLayout::PLANE_V]);
102             crcb = true;
103         }
104 
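        // Note: for semi-planar layouts (e.g. NV12/NV21) the U and V samples are interleaved, so the
        // distance between the two chroma plane offsets is smaller than the chroma step, whereas for
        // fully planar layouts (e.g. I420/YV12) the second chroma plane starts a whole plane later.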
105         bool semiplanar = false;
106         if (ycbcr.chroma_step >
107             offsets[C2PlanarLayout::PLANE_V] - offsets[C2PlanarLayout::PLANE_U]) {
108             semiplanar = true;
109         }
110 
111         if (!crcb && !semiplanar) {
112             *format = VideoPixelFormat::I420;
113         } else if (!crcb && semiplanar) {
114             *format = VideoPixelFormat::NV12;
115         } else if (crcb && !semiplanar) {
116             // HACK: pretend YV12 is I420 now since VEA only accepts I420. (YV12 will be used
117             //       for input byte-buffer mode).
118             // TODO(dstaessens): Is this hack still necessary now we're not using the VEA directly?
119             //format = VideoPixelFormat::YV12;
120             *format = VideoPixelFormat::I420;
121         } else {
122             *format = VideoPixelFormat::NV21;
123         }
124         break;
125     }
126     case C2PlanarLayout::TYPE_RGB: {
127         offsets[C2PlanarLayout::PLANE_R] = layout.planes[C2PlanarLayout::PLANE_R].offset;
128         strides[C2PlanarLayout::PLANE_R] =
129                 static_cast<uint32_t>(layout.planes[C2PlanarLayout::PLANE_R].rowInc);
130         *format = VideoPixelFormat::ARGB;
131         break;
132     }
133     default:
134         ALOGW("Unknown layout type: %u", static_cast<uint32_t>(layout.type));
135         return std::nullopt;
136     }
137 
138     std::vector<VideoFramePlane> planes;
139     for (uint32_t i = 0; i < layout.rootPlanes; ++i) {
140         // The mSize field is not used in our case, so we can safely set it to zero.
141         planes.push_back({strides[i], offsets[i], 0});
142     }
143     return planes;
144 }
145 
146 // Get the video frame stride for the specified |format| and |size|.
147 std::optional<uint32_t> getVideoFrameStride(VideoPixelFormat format, ui::Size size) {
148     // Fetch a graphic block from the pool to determine the stride.
149     std::shared_ptr<C2BlockPool> pool;
150     c2_status_t status = GetCodec2BlockPool(C2BlockPool::BASIC_GRAPHIC, nullptr, &pool);
151     if (status != C2_OK) {
152         ALOGE("Failed to get basic graphic block pool (err=%d)", status);
153         return std::nullopt;
154     }
155 
156     // The Android HAL doesn't have an I420 format, so we use YV12 instead and swap the U and V
157     // planes when converting to NV12. YCBCR_420_888 will be allocated as NV12 by minigbm.
158     HalPixelFormat halFormat = (format == VideoPixelFormat::I420) ? HalPixelFormat::YV12
159                                                                   : HalPixelFormat::YCBCR_420_888;
160 
161     std::shared_ptr<C2GraphicBlock> block;
162     status = pool->fetchGraphicBlock(size.width, size.height, static_cast<uint32_t>(halFormat),
163                                      C2MemoryUsage(C2MemoryUsage::CPU_READ), &block);
164     if (status != C2_OK) {
165         ALOGE("Failed to fetch graphic block (err=%d)", status);
166         return std::nullopt;
167     }
168 
169     const C2ConstGraphicBlock constBlock = block->share(C2Rect(size.width, size.height), C2Fence());
170     VideoPixelFormat pixelFormat;
171     std::optional<std::vector<VideoFramePlane>> planes =
172             getVideoFrameLayout(constBlock, &pixelFormat);
173     if (!planes || planes.value().empty()) {
174         ALOGE("Failed to get video frame layout from block");
175         return std::nullopt;
176     }
177 
178     return planes.value()[0].mStride;
179 }
180 
181 EncodeComponent::EncodeComponent(C2String name, c2_node_id_t id,
182                                  std::shared_ptr<EncodeInterface> interface)
183       : mName(name),
184         mId(id),
185         mInterface(std::move(interface)),
186         mComponentState(ComponentState::LOADED) {
187     ALOGV("%s(%s)", __func__, name.c_str());
188 }
189 
190 EncodeComponent::~EncodeComponent() {
191     ALOGV("%s()", __func__);
192 
193     // Stop encoder thread and invalidate pointers if component wasn't stopped before destroying.
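    // The encoder must be destroyed on the encoder thread where it was created, and invalidating the
    // weak pointers first prevents any tasks still queued on that thread from dereferencing |this|.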
194     if (mEncoderThread.IsRunning() && !mEncoderTaskRunner->RunsTasksInCurrentSequence()) {
195         mEncoderTaskRunner->PostTask(
196                 FROM_HERE, ::base::BindOnce(
197                                    [](::base::WeakPtrFactory<EncodeComponent>* weakPtrFactory,
198                                       std::unique_ptr<VideoEncoder>* encoder) {
199                                        weakPtrFactory->InvalidateWeakPtrs();
200                                        encoder->reset();
201                                    },
202                                    &mWeakThisFactory, &mEncoder));
203         mEncoderThread.Stop();
204     }
205 
206     ALOGV("%s(): done", __func__);
207 }
208 
209 c2_status_t EncodeComponent::start() {
210     ALOGV("%s()", __func__);
211 
212     // Lock while starting, to synchronize start/stop/reset/release calls.
213     std::lock_guard<std::mutex> lock(mComponentLock);
214 
215     // According to the specification start() should only be called in the LOADED state.
216     if (mComponentState != ComponentState::LOADED) {
217         return C2_BAD_STATE;
218     }
219 
220     if (!mEncoderThread.Start()) {
221         ALOGE("Failed to start encoder thread");
222         return C2_CORRUPTED;
223     }
224     mEncoderTaskRunner = mEncoderThread.task_runner();
225     mWeakThis = mWeakThisFactory.GetWeakPtr();
226 
227     // Initialize the encoder on the encoder thread.
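    // start() blocks on |done| until startTask() has finished running initializeEncoder() on the
    // encoder thread, so the component only transitions to RUNNING after initialization succeeded.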
228     ::base::WaitableEvent done;
229     bool success = false;
230     mEncoderTaskRunner->PostTask(
231             FROM_HERE, ::base::Bind(&EncodeComponent::startTask, mWeakThis, &success, &done));
232     done.Wait();
233 
234     if (!success) {
235         ALOGE("Failed to initialize encoder");
236         return C2_CORRUPTED;
237     }
238 
239     setComponentState(ComponentState::RUNNING);
240     return C2_OK;
241 }
242 
243 c2_status_t EncodeComponent::stop() {
244     ALOGV("%s()", __func__);
245 
246     // Lock while stopping, to synchronize start/stop/reset/release calls.
247     std::lock_guard<std::mutex> lock(mComponentLock);
248 
249     if (mComponentState != ComponentState::RUNNING && mComponentState != ComponentState::ERROR) {
250         return C2_BAD_STATE;
251     }
252 
253     // Return immediately if the component is already stopped.
254     if (!mEncoderThread.IsRunning()) {
255         return C2_OK;
256     }
257 
258     // Wait for the component to stop.
259     ::base::WaitableEvent done;
260     mEncoderTaskRunner->PostTask(FROM_HERE,
261                                  ::base::BindOnce(&EncodeComponent::stopTask, mWeakThis, &done));
262     done.Wait();
263     mEncoderThread.Stop();
264 
265     setComponentState(ComponentState::LOADED);
266 
267     ALOGV("%s() - done", __func__);
268     return C2_OK;
269 }
270 
271 c2_status_t EncodeComponent::reset() {
272     ALOGV("%s()", __func__);
273 
274     // The interface specification says: "This method MUST be supported in all (including tripped)
275     // states other than released".
276     if (mComponentState == ComponentState::UNLOADED) {
277         return C2_BAD_STATE;
278     }
279 
280     // TODO(dstaessens): Reset the component's interface to default values.
281     stop();
282 
283     return C2_OK;
284 }
285 
286 c2_status_t EncodeComponent::release() {
287     ALOGV("%s()", __func__);
288 
289     // The interface specification says: "This method MUST be supported in stopped state.", but the
290     // release method seems to be called in other states as well.
291     reset();
292 
293     setComponentState(ComponentState::UNLOADED);
294     return C2_OK;
295 }
296 
297 c2_status_t EncodeComponent::queue_nb(std::list<std::unique_ptr<C2Work>>* const items) {
298     ALOGV("%s()", __func__);
299 
300     if (mComponentState != ComponentState::RUNNING) {
301         ALOGE("Trying to queue work item while component is not running");
302         return C2_BAD_STATE;
303     }
304 
305     while (!items->empty()) {
306         mEncoderTaskRunner->PostTask(FROM_HERE,
307                                      ::base::BindOnce(&EncodeComponent::queueTask, mWeakThis,
308                                                       std::move(items->front())));
309         items->pop_front();
310     }
311 
312     return C2_OK;
313 }
314 
315 c2_status_t EncodeComponent::drain_nb(drain_mode_t mode) {
316     ALOGV("%s()", __func__);
317 
318     if (mode == DRAIN_CHAIN) {
319         return C2_OMITTED;  // Tunneling is not supported for now.
320     }
321 
322     if (mComponentState != ComponentState::RUNNING) {
323         return C2_BAD_STATE;
324     }
325 
326     mEncoderTaskRunner->PostTask(FROM_HERE,
327                                  ::base::BindOnce(&EncodeComponent::drainTask, mWeakThis, mode));
328     return C2_OK;
329 }
330 
331 c2_status_t EncodeComponent::flush_sm(flush_mode_t mode,
332                                       std::list<std::unique_ptr<C2Work>>* const flushedWork) {
333     ALOGV("%s()", __func__);
334 
335     if (mode != FLUSH_COMPONENT) {
336         return C2_OMITTED;  // Tunneling is not supported for now.
337     }
338 
339     if (mComponentState != ComponentState::RUNNING) {
340         return C2_BAD_STATE;
341     }
342 
343     // Work that can be immediately discarded should be returned in |flushedWork|. This method may
344     // be momentarily blocking but must return within 5ms, which should give us enough time to
345     // immediately abandon all non-started work on the encoder thread. We can return all work that
346     // can't be immediately discarded using onWorkDone() later.
347     ::base::WaitableEvent done;
348     mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::flushTask, mWeakThis,
349                                                              &done, flushedWork));
350     done.Wait();
351 
352     return C2_OK;
353 }
354 
355 c2_status_t EncodeComponent::announce_nb(const std::vector<C2WorkOutline>& items) {
356     return C2_OMITTED;  // Tunneling is not supported for now.
357 }
358 
359 c2_status_t EncodeComponent::setListener_vb(const std::shared_ptr<Listener>& listener,
360                                             c2_blocking_t mayBlock) {
361     ALOG_ASSERT(mComponentState != ComponentState::UNLOADED);
362 
363     // Lock so we're sure the component isn't currently starting or stopping.
364     std::lock_guard<std::mutex> lock(mComponentLock);
365 
366     // If the encoder thread is not running it's safe to update the listener directly.
367     if (!mEncoderThread.IsRunning()) {
368         mListener = listener;
369         return C2_OK;
370     }
371 
372     // The listener should be updated before exiting this function. If called while the component is
373     // currently running we should be allowed to block, as we can only change the listener on the
374     // encoder thread.
375     ALOG_ASSERT(mayBlock == c2_blocking_t::C2_MAY_BLOCK);
376 
377     ::base::WaitableEvent done;
378     mEncoderTaskRunner->PostTask(FROM_HERE, ::base::BindOnce(&EncodeComponent::setListenerTask,
379                                                              mWeakThis, listener, &done));
380     done.Wait();
381 
382     return C2_OK;
383 }
384 
385 std::shared_ptr<C2ComponentInterface> EncodeComponent::intf() {
386     return std::make_shared<SimpleInterface<EncodeInterface>>(mName.c_str(), mId, mInterface);
387 }
388 
389 void EncodeComponent::startTask(bool* success, ::base::WaitableEvent* done) {
390     ATRACE_CALL();
391     ALOGV("%s()", __func__);
392     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
393 
394     *success = initializeEncoder();
395     done->Signal();
396 }
397 
398 void EncodeComponent::stopTask(::base::WaitableEvent* done) {
399     ATRACE_CALL();
400     ALOGV("%s()", __func__);
401     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
402 
403     // Flushing the encoder will abort all pending work.
404     flush();
405 
406     mInputFormatConverter.reset();
407     mInputPixelFormat = VideoPixelFormat::UNKNOWN;
408     mInputLayout.clear();
409 
410     mEncoder.reset();
411     mOutputBlockPool.reset();
412 
413     // Invalidate all weak pointers so no more functions will be executed on the encoder thread.
414     mWeakThisFactory.InvalidateWeakPtrs();
415 
416     done->Signal();
417 }
418 
419 void EncodeComponent::queueTask(std::unique_ptr<C2Work> work) {
420     ATRACE_CALL();
421     ALOGV("%s()", __func__);
422     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
423     ALOG_ASSERT(mEncoder);
424 
425     // Currently only a single worklet per work item is supported. An input buffer should always be
426     // supplied unless this is a drain or CSD request.
427     ALOG_ASSERT(work->input.buffers.size() <= 1u && work->worklets.size() == 1u);
428 
429     // Set the default values for the output worklet.
430     work->worklets.front()->output.flags = static_cast<C2FrameData::flags_t>(0);
431     work->worklets.front()->output.buffers.clear();
432     work->worklets.front()->output.ordinal = work->input.ordinal;
433 
434     uint64_t index = work->input.ordinal.frameIndex.peeku();
435     int64_t timestamp = static_cast<int64_t>(work->input.ordinal.timestamp.peeku());
436     bool endOfStream = work->input.flags & C2FrameData::FLAG_END_OF_STREAM;
437     ALOGV("Queuing next encode (index: %" PRIu64 ", timestamp: %" PRId64 ", EOS: %d)", index,
438           timestamp, endOfStream);
439 
440     // If the input buffer list is not empty, it means we have some input to process. However, the
441     // input could be a null buffer; in that case clear the buffer list before going any further.
443     if (!work->input.buffers.empty() && !work->input.buffers[0]) {
444         ALOGD("Encountered null input buffer. Clearing the input buffer");
445         work->input.buffers.clear();
446     }
447 
448     // The codec 2.0 framework might queue an empty CSD request, but this is currently not
449     // supported. We will return the CSD with the first encoded buffer work.
450     if (work->input.buffers.empty() && !endOfStream) {
451         ALOGV("Discarding empty CSD request");
452         reportWork(std::move(work));
453         return;
454     }
455 
456     // By the time we get an input buffer, the output block pool should be configured.
457     if (!mOutputBlockPool && !getBlockPool()) {
458         reportError(C2_CORRUPTED);
459         return;
460     }
461 
462     // If this is the first input frame, create an input format converter if the V4L2 device doesn't
463     // support the requested input format.
464     if ((mInputPixelFormat == VideoPixelFormat::UNKNOWN) && !work->input.buffers.empty()) {
465         VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
466         if (!getVideoFrameLayout(work->input.buffers.front()->data().graphicBlocks().front(),
467                                  &format)) {
468             ALOGE("Failed to get input block's layout");
469             reportError(C2_CORRUPTED);
470             return;
471         }
472         if (mEncoder->inputFormat() != format) {
473             ALOG_ASSERT(!mInputFormatConverter);
474             ALOGV("Creating input format convertor (%s)",
475                   videoPixelFormatToString(mEncoder->inputFormat()).c_str());
476             mInputFormatConverter =
477                     FormatConverter::create(mEncoder->inputFormat(), mEncoder->visibleSize(),
478                                             VideoEncoder::kInputBufferCount, mEncoder->codedSize());
479             if (!mInputFormatConverter) {
480                 ALOGE("Failed to create input format convertor");
481                 reportError(C2_CORRUPTED);
482                 return;
483             }
484         }
485     }
486 
487     // If conversion is required but no free buffers are available we queue the work item.
488     if (mInputFormatConverter && !mInputFormatConverter->isReady()) {
489         ALOGV("Input format convertor ran out of buffers");
490         mInputConverterQueue.push(std::move(work));
491         return;
492     }
493 
494     // If we have data to encode send it to the encoder. If conversion is required we will first
495     // convert the data to the requested pixel format.
496     if (!work->input.buffers.empty()) {
497         C2ConstGraphicBlock inputBlock =
498                 work->input.buffers.front()->data().graphicBlocks().front();
499         if (mInputFormatConverter) {
500             ALOGV("Converting input block (index: %" PRIu64 ")", index);
501             c2_status_t status =
502                     mInputFormatConverter->convertBlock(index, inputBlock, &inputBlock);
503             if (status != C2_OK) {
504                 ALOGE("Failed to convert input block (index: %" PRIu64 ")", index);
505                 reportError(status);
506                 return;
507             }
508         } else {
509             // The Android encoder framework reuses the same GPU buffers as
510             // inputs and doesn't call lock/unlock explicitly between writes.
511             // If there is format conversion this is fine, since we first read
512             // back what was written and then put it in another buffer. When
513             // there is no format conversion, this causes a sync issue on ARCVM
514             // since the host-side buffers never get updated. Fix this by
515             // explicitly calling lock/unlock before sending the buffer to the
516             // encoder.
517             const C2Handle* handle = inputBlock.handle();
518             uint32_t width, height, format, stride, generation, igbpSlot;
519             uint64_t usage, igbpId;
520             _UnwrapNativeCodec2GrallocMetadata(handle, &width, &height, &format, &usage, &stride,
521                                                &generation, &igbpId, &igbpSlot);
522             do {
523                 if (!(usage & GRALLOC_USAGE_SW_WRITE_MASK)) break;
524                 native_handle_t* gralloc_handle = UnwrapNativeCodec2GrallocHandle(handle);
525                 if (nullptr == gralloc_handle) break;
526                 sp<GraphicBuffer> buffer =
527                         new GraphicBuffer(gralloc_handle, GraphicBuffer::CLONE_HANDLE, width,
528                                           height, format, 1, usage, stride);
529                 native_handle_delete(gralloc_handle);
530                 void* pixels;
531                 if (buffer->lock(GRALLOC_USAGE_SW_WRITE_OFTEN, &pixels)) break;
532                 buffer->unlock();
533             } while (0);
534         }
535         if (!encode(inputBlock, index, timestamp)) {
536             return;
537         }
538     }
539 
540     mWorkQueue.push_back(std::move(work));
541     if (endOfStream) {
542         mEncoder->drain();
543     }
544 }
545 
546 void EncodeComponent::drainTask(drain_mode_t /*drainMode*/) {
547     ALOGV("%s()", __func__);
548     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
549 
550     // We can only start draining if all work has been queued in the encoder, so we mark the last
551     // item waiting for conversion as EOS if required.
552     if (!mInputConverterQueue.empty()) {
553         C2Work* work = mInputConverterQueue.back().get();
554         work->input.flags = static_cast<C2FrameData::flags_t>(work->input.flags |
555                                                               C2FrameData::FLAG_END_OF_STREAM);
556         return;
557     }
558 
559     // Mark the last item in the output work queue as EOS, so we will only report it as finished
560     // after draining has completed.
561     if (!mWorkQueue.empty()) {
562         ALOGV("Starting drain and marking last item in output work queue as EOS");
563         C2Work* work = mWorkQueue.back().get();
564         work->input.flags = static_cast<C2FrameData::flags_t>(work->input.flags |
565                                                               C2FrameData::FLAG_END_OF_STREAM);
566         mEncoder->drain();
567     }
568 }
569 
570 void EncodeComponent::onDrainDone(bool success) {
571     ALOGV("%s()", __func__);
572     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
573     ALOG_ASSERT(!mWorkQueue.empty());
574 
575     if (!success) {
576         ALOGE("draining the encoder failed");
577         reportError(C2_CORRUPTED);
578         return;
579     }
580 
581     // Find the first work item marked as EOS. This might not be the first item in the queue, as
582     // previous buffers in the queue might still be waiting for their associated input buffers.
583     auto it = std::find_if(
584             mWorkQueue.cbegin(), mWorkQueue.cend(), [](const std::unique_ptr<C2Work>& work) {
585                 return ((work->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
586                         !(work->worklets.back()->output.flags & C2FrameData::FLAG_END_OF_STREAM));
587             });
588     if (it == mWorkQueue.end()) {
589         ALOGW("No EOS work item found in queue");
590         return;
591     }
592 
593     // Mark the item in the output work queue as EOS done.
594     C2Work* eosWork = it->get();
595     eosWork->worklets.back()->output.flags = C2FrameData::FLAG_END_OF_STREAM;
596 
597     // Draining is done, which means all buffers on the device output queue have been returned, but
598     // some buffers on the device input queue might not have been returned yet.
599     if ((eosWork != mWorkQueue.front().get()) || !isWorkDone(*eosWork)) {
600         ALOGV("Draining done, waiting for input buffers to be returned");
601         return;
602     }
603 
604     ALOGV("Draining done");
605     reportWork(std::move(mWorkQueue.front()));
606     mWorkQueue.pop_front();
607 }
608 
609 void EncodeComponent::flushTask(::base::WaitableEvent* done,
610                                 std::list<std::unique_ptr<C2Work>>* const flushedWork) {
611     ATRACE_CALL();
612     ALOGV("%s()", __func__);
613     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
614 
615     // Move all work that can immediately be aborted to flushedWork, and notify the caller.
616     if (flushedWork) {
617         while (!mInputConverterQueue.empty()) {
618             std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
619             work->input.buffers.clear();
620             flushedWork->push_back(std::move(work));
621             mInputConverterQueue.pop();
622         }
623     }
624     done->Signal();
625 
626     flush();
627 }
628 
629 void EncodeComponent::setListenerTask(const std::shared_ptr<Listener>& listener,
630                                       ::base::WaitableEvent* done) {
631     ALOGV("%s()", __func__);
632     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
633 
634     mListener = listener;
635     done->Signal();
636 }
637 
638 bool EncodeComponent::updateEncodingParameters() {
639     ATRACE_CALL();
640     ALOGV("%s()", __func__);
641     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
642 
643     // Ask device to change bitrate if it's different from the currently configured bitrate. The C2
644     // framework doesn't offer a parameter to configure the peak bitrate, so we'll use a multiple of
645     // the target bitrate here. The peak bitrate is only used if the bitrate mode is set to VBR.
646     uint32_t bitrate = mInterface->getBitrate();
647     if (mBitrate != bitrate) {
648         ALOG_ASSERT(bitrate > 0u);
649         ALOGV("Setting bitrate to %u", bitrate);
650         if (!mEncoder->setBitrate(bitrate)) {
651             reportError(C2_CORRUPTED);
652             return false;
653         }
654         mBitrate = bitrate;
655 
656         if (mBitrateMode == C2Config::BITRATE_VARIABLE) {
657             ALOGV("Setting peak bitrate to %u", bitrate * VideoEncoder::kPeakBitrateMultiplier);
658             // TODO(b/190336806): Our stack doesn't support dynamic peak bitrate changes yet, ignore
659             // errors for now.
660             mEncoder->setPeakBitrate(bitrate * VideoEncoder::kPeakBitrateMultiplier);
661         }
662     }
663 
664     // Ask device to change framerate if it's different from the currently configured framerate.
665     uint32_t framerate = static_cast<uint32_t>(std::round(mInterface->getFramerate()));
666     if (mFramerate != framerate) {
667         ALOG_ASSERT(framerate > 0u);
668         ALOGV("Setting framerate to %u", framerate);
669         if (!mEncoder->setFramerate(framerate)) {
670             ALOGE("Requesting framerate change failed");
671             reportError(C2_CORRUPTED);
672             return false;
673         }
674         mFramerate = framerate;
675     }
676 
677     // Check whether an explicit key frame was requested; if so, request a key frame from the
678     // encoder and clear the request flag on the interface.
679     C2StreamRequestSyncFrameTuning::output requestKeyFrame;
680     c2_status_t status = mInterface->query({&requestKeyFrame}, {}, C2_DONT_BLOCK, nullptr);
681     if (status != C2_OK) {
682         ALOGE("Failed to query interface for key frame request (error code: %d)", status);
683         reportError(status);
684         return false;
685     }
686     if (requestKeyFrame.value == C2_TRUE) {
687         mEncoder->requestKeyframe();
688         requestKeyFrame.value = C2_FALSE;
689         std::vector<std::unique_ptr<C2SettingResult>> failures;
690         status = mInterface->config({&requestKeyFrame}, C2_MAY_BLOCK, &failures);
691         if (status != C2_OK) {
692             ALOGE("Failed to reset key frame request on interface (error code: %d)", status);
693             reportError(status);
694             return false;
695         }
696     }
697 
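    // For H.264 the level constrains the macroblock processing rate, so derive the maximum framerate
    // allowed for the configured level and input size; encode() uses it to clamp timestamp-derived
    // framerate updates (see b/362902868).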
698     C2Config::profile_t outputProfile = mInterface->getOutputProfile();
699     if (isH264Profile(outputProfile)) {
700         C2Config::level_t outputLevel = mInterface->getOutputLevel();
701         ui::Size inputSize = mInterface->getInputVisibleSize();
702         mMaxFramerate = maxFramerateForLevelH264(outputLevel, inputSize);
703     }
704 
705     return true;
706 }
707 
708 bool EncodeComponent::encode(C2ConstGraphicBlock block, uint64_t index, int64_t timestamp) {
709     ATRACE_CALL();
710     ALOGV("%s()", __func__);
711     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
712     ALOG_ASSERT(mEncoder);
713 
714     ALOGV("Encoding input block (index: %" PRIu64 ", timestamp: %" PRId64 ", size: %dx%d)", index,
715           timestamp, block.width(), block.height());
716 
717     // If this is the first input frame, determine the pixel format and layout.
718     if (mInputPixelFormat == VideoPixelFormat::UNKNOWN) {
719         ALOG_ASSERT(mInputLayout.empty());
720         VideoPixelFormat format = VideoPixelFormat::UNKNOWN;
721         std::optional<std::vector<VideoFramePlane>> inputLayout =
722                 getVideoFrameLayout(block, &format);
723         if (!inputLayout) {
724             ALOGE("Failed to get input block's layout");
725             reportError(C2_CORRUPTED);
726             return false;
727         }
728         mInputPixelFormat = format;
729         mInputLayout = std::move(*inputLayout);
730     }
731 
732     // Dynamically adjust framerate based on the frame's timestamp if required.
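    // Timestamps are in microseconds, so the instantaneous framerate is roughly
    // 1,000,000 / (timestamp - previous timestamp); changes smaller than kMaxFramerateDiff fps are
    // ignored to avoid churn from minor timestamp jitter.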
733     constexpr int64_t kMaxFramerateDiff = 5;
734     if (mLastFrameTime && (timestamp > *mLastFrameTime)) {
735         int64_t newFramerate = std::max(
736                 static_cast<int64_t>(std::round(1000000.0 / (timestamp - *mLastFrameTime))),
737                 static_cast<int64_t>(1LL));
738         // Clients using an input surface may exceed the maximum allowed framerate for the given
739         // profile. One such example is android.media.codec.cts.MediaCodecTest#testAbruptStop.
740         // To mitigate this, the value is clamped to the maximum framerate for the given level and
741         // the current frame size.
742         // See: b/362902868
743         if (newFramerate > mMaxFramerate) {
744             ALOGW("Frames are coming too fast - new framerate (%" PRIi64
745                   ") would exceed the maximum value (%" PRIu32 ")",
746                   newFramerate, mMaxFramerate);
747             newFramerate = mMaxFramerate;
748         }
749 
750         if (abs(mFramerate - newFramerate) > kMaxFramerateDiff) {
751             ALOGV("Adjusting framerate to %" PRId64 " based on frame timestamps", newFramerate);
752             mInterface->setFramerate(static_cast<uint32_t>(newFramerate));
753         }
754     }
755     mLastFrameTime = timestamp;
756 
757     // Update dynamic encoding parameters (bitrate, framerate, key frame) if requested.
758     if (!updateEncodingParameters()) return false;
759 
760     // Create an input frame from the graphic block.
761     std::unique_ptr<VideoEncoder::InputFrame> frame =
762             createInputFrame(block, mInputPixelFormat, mInputLayout, index, timestamp);
763     if (!frame) {
764         ALOGE("Failed to create video frame from input block (index: %" PRIu64
765               ", timestamp: %" PRId64 ")",
766               index, timestamp);
767         reportError(C2_CORRUPTED);
768         return false;
769     }
770 
771     if (!mEncoder->encode(std::move(frame))) {
772         return false;
773     }
774 
775     return true;
776 }
777 
778 void EncodeComponent::flush() {
779     ATRACE_CALL();
780     ALOGV("%s()", __func__);
781     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
782 
783     mEncoder->flush();
784 
785     // Report all queued work items as aborted.
786     std::list<std::unique_ptr<C2Work>> abortedWorkItems;
787     while (!mInputConverterQueue.empty()) {
788         std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
789         work->result = C2_NOT_FOUND;
790         work->input.buffers.clear();
791         abortedWorkItems.push_back(std::move(work));
792         mInputConverterQueue.pop();
793     }
794     while (!mWorkQueue.empty()) {
795         std::unique_ptr<C2Work> work = std::move(mWorkQueue.front());
796         // Return buffer to the input format convertor if required.
797         if (mInputFormatConverter && work->input.buffers.empty()) {
798             mInputFormatConverter->returnBlock(work->input.ordinal.frameIndex.peeku());
799         }
800         work->result = C2_NOT_FOUND;
801         work->input.buffers.clear();
802         abortedWorkItems.push_back(std::move(work));
803         mWorkQueue.pop_front();
804     }
805     if (!abortedWorkItems.empty()) {
806         mListener->onWorkDone_nb(weak_from_this(), std::move(abortedWorkItems));
807     }
808 }
809 
810 void EncodeComponent::fetchOutputBlock(uint32_t size, std::unique_ptr<BitstreamBuffer>* buffer) {
811     ATRACE_CALL();
812     ALOGV("Fetching linear block (size: %u)", size);
813     std::shared_ptr<C2LinearBlock> block;
814     c2_status_t status = mOutputBlockPool->fetchLinearBlock(
815             size,
816             C2MemoryUsage(C2MemoryUsage::CPU_READ |
817                           static_cast<uint64_t>(BufferUsage::VIDEO_ENCODER)),
818             &block);
819     if (status != C2_OK) {
820         ALOGE("Failed to fetch linear block (error: %d)", status);
821         reportError(status);
822     }
823 
824     *buffer = std::make_unique<BitstreamBuffer>(std::move(block), 0, size);
825 }
826 
827 void EncodeComponent::onInputBufferDone(uint64_t index) {
828     ALOGV("%s(): Input buffer done (index: %" PRIu64 ")", __func__, index);
829     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
830     ALOG_ASSERT(mEncoder);
831 
832     // There are no guarantees the input buffers are returned in order, so we need to find the work
833     // item which this buffer belongs to.
834     C2Work* work = getWorkByIndex(index);
835     if (!work) {
836         ALOGE("Failed to find work associated with input buffer %" PRIu64, index);
837         reportError(C2_CORRUPTED);
838         return;
839     }
840 
841     // We're done using the input block, release reference to return the block to the client.
842     LOG_ASSERT(!work->input.buffers.empty());
843     work->input.buffers.front().reset();
844 
845     // Return the block to the convertor if required. If we have buffers awaiting conversion, we can
846     // now attempt to convert and encode them again.
847     if (mInputFormatConverter) {
848         c2_status_t status = mInputFormatConverter->returnBlock(index);
849         if (status != C2_OK) {
850             reportError(status);
851             return;
852         }
853         while (!mInputConverterQueue.empty() && mInputFormatConverter->isReady()) {
854             std::unique_ptr<C2Work> work = std::move(mInputConverterQueue.front());
855             mInputConverterQueue.pop();
856             queueTask(std::move(work));
857         }
858     }
859 
860     // Return all completed work items. The work item might have been waiting for its input buffer
861     // to be returned, in which case we can report it as completed now. As input buffers are not
862     // necessarily returned in order we might be able to return multiple ready work items now.
863     while (!mWorkQueue.empty() && isWorkDone(*mWorkQueue.front())) {
864         reportWork(std::move(mWorkQueue.front()));
865         mWorkQueue.pop_front();
866     }
867 }
868 
869 void EncodeComponent::onOutputBufferDone(size_t dataSize, int64_t timestamp, bool keyFrame,
870                                          std::unique_ptr<BitstreamBuffer> buffer) {
871     ALOGV("%s(): output buffer done (timestamp: %" PRId64 ", size: %zu, keyframe: %d)", __func__,
872           timestamp, dataSize, keyFrame);
873     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
874     ALOG_ASSERT(buffer->dmabuf);
875 
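    // Share only |dataSize| bytes starting at the block's offset, so the client sees exactly the
    // encoded payload rather than the full capacity of the output block.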
876     C2ConstLinearBlock constBlock =
877             buffer->dmabuf->share(buffer->dmabuf->offset(), dataSize, C2Fence());
878 
879     // If no CSD (codec-specific data, e.g. SPS and PPS for H.264) has been submitted yet, we expect this
880     // output block to contain CSD. We only submit the CSD once, even if it's attached to each key
881     // frame.
882     if (mExtractCSD) {
883         ALOGV("No CSD submitted yet, extracting CSD");
884         std::unique_ptr<C2StreamInitDataInfo::output> csd;
885         C2ReadView view = constBlock.map().get();
886         if (!extractCSDInfo(&csd, view.data(), view.capacity())) {
887             ALOGE("Failed to extract CSD");
888             reportError(C2_CORRUPTED);
889             return;
890         }
891 
892         // Attach the CSD to the first item in our output work queue.
893         LOG_ASSERT(!mWorkQueue.empty());
894         C2Work* work = mWorkQueue.front().get();
895         work->worklets.front()->output.configUpdate.push_back(std::move(csd));
896         mExtractCSD = false;
897     }
898 
899     // Get the work item associated with the timestamp.
900     C2Work* work = getWorkByTimestamp(timestamp);
901     if (!work) {
902         // It's possible we got an empty CSD request with timestamp 0, which we currently just
903         // discard.
904         if (timestamp != 0) {
905             reportError(C2_CORRUPTED);
906         }
907         return;
908     }
909 
910     std::shared_ptr<C2Buffer> linearBuffer = C2Buffer::CreateLinearBuffer(std::move(constBlock));
911     if (!linearBuffer) {
912         ALOGE("Failed to create linear buffer from block");
913         reportError(C2_CORRUPTED);
914         return;
915     }
916 
917     if (keyFrame) {
918         linearBuffer->setInfo(
919                 std::make_shared<C2StreamPictureTypeMaskInfo::output>(0u, C2Config::SYNC_FRAME));
920     }
921     work->worklets.front()->output.buffers.emplace_back(std::move(linearBuffer));
922 
923     // We can report the work item as completed if its associated input buffer has also been
924     // released. As output buffers are not necessarily returned in order we might be able to return
925     // multiple ready work items now.
926     while (!mWorkQueue.empty() && isWorkDone(*mWorkQueue.front())) {
927         reportWork(std::move(mWorkQueue.front()));
928         mWorkQueue.pop_front();
929     }
930 }
931 
932 C2Work* EncodeComponent::getWorkByIndex(uint64_t index) {
933     ALOGV("%s(): getting work item (index: %" PRIu64 ")", __func__, index);
934     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
935 
936     auto it = std::find_if(mWorkQueue.begin(), mWorkQueue.end(),
937                            [index](const std::unique_ptr<C2Work>& w) {
938                                return w->input.ordinal.frameIndex.peeku() == index;
939                            });
940     if (it == mWorkQueue.end()) {
941         ALOGE("Failed to find work (index: %" PRIu64 ")", index);
942         return nullptr;
943     }
944     return it->get();
945 }
946 
947 C2Work* EncodeComponent::getWorkByTimestamp(int64_t timestamp) {
948     ALOGV("%s(): getting work item (timestamp: %" PRId64 ")", __func__, timestamp);
949     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
950     ALOG_ASSERT(timestamp >= 0);
951 
952     // Find the work item with the specified timestamp by looping over the output work queue. This
953     // should be very fast as the output work queue will never be longer than a few items. Ignore
954     // empty work items that are marked as EOS, as their timestamp might clash with other work items.
955     auto it = std::find_if(
956             mWorkQueue.begin(), mWorkQueue.end(), [timestamp](const std::unique_ptr<C2Work>& w) {
957                 return !(w->input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
958                        w->input.ordinal.timestamp.peeku() == static_cast<uint64_t>(timestamp);
959             });
960     if (it == mWorkQueue.end()) {
961         ALOGE("Failed to find work (timestamp: %" PRId64 ")", timestamp);
962         return nullptr;
963     }
964     return it->get();
965 }
966 
967 bool EncodeComponent::isWorkDone(const C2Work& work) const {
968     ALOGV("%s()", __func__);
969     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
970 
971     if ((work.input.flags & C2FrameData::FLAG_END_OF_STREAM) &&
972         !(work.worklets.front()->output.flags & C2FrameData::FLAG_END_OF_STREAM)) {
973         ALOGV("Work item %" PRIu64 " is marked as EOS but draining has not finished yet",
974               work.input.ordinal.frameIndex.peeku());
975         return false;
976     }
977 
978     if (!work.input.buffers.empty() && work.input.buffers.front()) {
979         ALOGV("Input buffer associated with work item %" PRIu64 " not returned yet",
980               work.input.ordinal.frameIndex.peeku());
981         return false;
982     }
983 
984     // If the work item had an input buffer to be encoded, it should have an output buffer set.
985     if (!work.input.buffers.empty() && work.worklets.front()->output.buffers.empty()) {
986         ALOGV("Output buffer associated with work item %" PRIu64 " not returned yet",
987               work.input.ordinal.frameIndex.peeku());
988         return false;
989     }
990 
991     return true;
992 }
993 
994 void EncodeComponent::reportWork(std::unique_ptr<C2Work> work) {
995     ATRACE_CALL();
996     ALOG_ASSERT(work);
997     ALOGV("%s(): Reporting work item as finished (index: %llu, timestamp: %llu)", __func__,
998           work->input.ordinal.frameIndex.peekull(), work->input.ordinal.timestamp.peekull());
999     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1000 
1001     work->result = C2_OK;
1002     work->workletsProcessed = static_cast<uint32_t>(work->worklets.size());
1003 
1004     std::list<std::unique_ptr<C2Work>> finishedWorkList;
1005     finishedWorkList.emplace_back(std::move(work));
1006     mListener->onWorkDone_nb(weak_from_this(), std::move(finishedWorkList));
1007 }
1008 
1009 bool EncodeComponent::getBlockPool() {
1010     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1011 
1012     auto sharedThis = weak_from_this().lock();
1013     if (!sharedThis) {
1014         ALOGI("%s(): EncodeComponent instance is already destroyed", __func__);
1015         return false;
1016     }
1017 
1018     C2BlockPool::local_id_t poolId = mInterface->getBlockPoolId();
1019     if (poolId == C2BlockPool::BASIC_LINEAR) {
1020         ALOGW("Using unoptimized linear block pool");
1021     }
1022     c2_status_t status = GetCodec2BlockPool(poolId, std::move(sharedThis), &mOutputBlockPool);
1023     if (status != C2_OK || !mOutputBlockPool) {
1024         ALOGE("Failed to get output block pool, error: %d", status);
1025         return false;
1026     }
1027     return true;
1028 }
1029 
1030 void EncodeComponent::reportError(c2_status_t error) {
1031     ALOGV("%s()", __func__);
1032     ALOG_ASSERT(mEncoderTaskRunner->RunsTasksInCurrentSequence());
1033 
1034     // TODO(dstaessens): Report all pending work items as finished upon failure.
1035     std::lock_guard<std::mutex> lock(mComponentLock);
1036     if (mComponentState != ComponentState::ERROR) {
1037         setComponentState(ComponentState::ERROR);
1038         mListener->onError_nb(weak_from_this(), static_cast<uint32_t>(error));
1039     }
1040 }
1041 
1042 void EncodeComponent::setComponentState(ComponentState state) {
1043     // Check whether the state change is valid.
1044     switch (state) {
1045     case ComponentState::UNLOADED:
1046         ALOG_ASSERT(mComponentState == ComponentState::LOADED);
1047         break;
1048     case ComponentState::LOADED:
1049         ALOG_ASSERT(mComponentState == ComponentState::UNLOADED ||
1050                     mComponentState == ComponentState::RUNNING ||
1051                     mComponentState == ComponentState::ERROR);
1052         break;
1053     case ComponentState::RUNNING:
1054         ALOG_ASSERT(mComponentState == ComponentState::LOADED);
1055         break;
1056     case ComponentState::ERROR:
1057         break;
1058     }
1059 
1060     ALOGV("Changed component state from %s to %s", componentStateToString(mComponentState),
1061           componentStateToString(state));
1062     mComponentState = state;
1063 }
1064 
1065 const char* EncodeComponent::componentStateToString(EncodeComponent::ComponentState state) {
1066     switch (state) {
1067     case ComponentState::UNLOADED:
1068         return "UNLOADED";
1069     case ComponentState::LOADED:
1070         return "LOADED";
1071     case ComponentState::RUNNING:
1072         return "RUNNING";
1073     case ComponentState::ERROR:
1074         return "ERROR";
1075     }
1076 }
1077 
1078 }  // namespace android
1079